Repository: withastro/compiler Branch: main Commit: 41515e454944 Files: 584 Total size: 1.8 MB Directory structure: gitextract_v3t26jmm/ ├── .changeset/ │ ├── README.md │ └── config.json ├── .devcontainer/ │ ├── Dockerfile │ └── devcontainer.json ├── .editorconfig ├── .git-blame-ignore-revs ├── .gitattributes ├── .github/ │ ├── ISSUE_TEMPLATE/ │ │ ├── ---01-bug-report.yml │ │ └── config.yml │ ├── PULL_REQUEST_TEMPLATE.md │ └── workflows/ │ ├── ci.yml │ ├── congrats.yml │ ├── issue-labeled.yml │ ├── issue-needs-repro.yml │ ├── issue-opened.yml │ └── release.yml ├── .gitignore ├── .gitpod.yml ├── .golangci.yml ├── .prettierignore ├── .vscode/ │ └── settings.json ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── SYNTAX_SPEC.md ├── biome.json ├── cmd/ │ └── astro-wasm/ │ └── astro-wasm.go ├── go.mod ├── go.sum ├── internal/ │ ├── const.go │ ├── doc.go │ ├── doctype.go │ ├── entity.go │ ├── escape.go │ ├── foreign.go │ ├── handler/ │ │ └── handler.go │ ├── hash.go │ ├── helpers/ │ │ ├── joiner.go │ │ └── js_comment_utils.go │ ├── js_scanner/ │ │ ├── js_scanner.go │ │ ├── js_scanner_test.go │ │ └── testdata/ │ │ └── fuzz/ │ │ └── FuzzHoistImport/ │ │ └── ec55358ab2929fbf4deab52587664e42682f0a6ea201a325c5c33f9d18c50456 │ ├── loc/ │ │ ├── diagnostics.go │ │ └── loc.go │ ├── node.go │ ├── parser.go │ ├── parser_test.go │ ├── print-to-source.go │ ├── printer/ │ │ ├── __printer_css__/ │ │ │ ├── scopedStyleStrategy___attribute_.snap │ │ │ ├── scopedStyleStrategy___class_.snap │ │ │ └── styles__no_frontmatter_.snap │ │ ├── __printer_js__/ │ │ │ ├── All_components.snap │ │ │ ├── Component_is_raw.snap │ │ │ ├── Component_names_A-Z.snap │ │ │ ├── Component_siblings_are_siblings.snap │ │ │ ├── Component_with_await.snap │ │ │ ├── Empty_attribute_expression.snap │ │ │ ├── Empty_expression.snap │ │ │ ├── Empty_expression_with_whitespace.snap │ │ │ ├── Empty_script.snap │ │ │ ├── Empty_style.snap │ │ │ ├── Expression_in_form_followed_by_other_sibling_forms.snap │ │ │ ├── Fragment.snap │ 
│ │ ├── Fragment_literal_only.snap │ │ │ ├── Fragment_shorthand.snap │ │ │ ├── Fragment_shorthand_only.snap │ │ │ ├── Fragment_shorthand_with_await.snap │ │ │ ├── Fragment_slotted.snap │ │ │ ├── Fragment_slotted_with_name.snap │ │ │ ├── Fragment_with_await.snap │ │ │ ├── Fragment_wrapping_link_with_awaited_href.snap │ │ │ ├── HTML_comment_in_component_inside_expression_I.snap │ │ │ ├── HTML_comment_in_component_inside_expression_II.snap │ │ │ ├── Mixed_style_siblings.snap │ │ │ ├── Nested_HTML_in_expressions,_wrapped_in_parens.snap │ │ │ ├── No_extra_script_tag.snap │ │ │ ├── Parser_can_handle_files___4096_chars.snap │ │ │ ├── Preserve_namespaces.snap │ │ │ ├── Preserve_namespaces_for_components.snap │ │ │ ├── Preserve_namespaces_in_expressions.snap │ │ │ ├── Preserve_slot_attribute_at_root_level_in_expression.snap │ │ │ ├── Preserve_slot_attribute_in_conditional_expression_for_custom_element.snap │ │ │ ├── Preserve_slot_attribute_in_expression_for_custom_element.snap │ │ │ ├── Preserve_slot_whitespace.snap │ │ │ ├── Preserve_slots_inside_custom-element.snap │ │ │ ├── React_framework_example.snap │ │ │ ├── SVG_styles.snap │ │ │ ├── Self-closing_components_in_head_can_have_siblings.snap │ │ │ ├── Self-closing_components_siblings_are_siblings.snap │ │ │ ├── Self-closing_formatting_elements.snap │ │ │ ├── Self-closing_formatting_elements_2.snap │ │ │ ├── Self-closing_script_in_head_works.snap │ │ │ ├── Self-closing_title.snap │ │ │ ├── Self-closing_title_II.snap │ │ │ ├── Use_of_interfaces_within_frontmatter.snap │ │ │ ├── XElement.snap │ │ │ ├── _955_ternary_slot_with_elements.snap │ │ │ ├── _955_ternary_slot_with_text.snap │ │ │ ├── advanced_svg_expression.snap │ │ │ ├── anchor_content.snap │ │ │ ├── anchor_expressions.snap │ │ │ ├── anchor_inside_expression.snap │ │ │ ├── attribute_with_template_literal.snap │ │ │ ├── attribute_with_template_literal_interpolation.snap │ │ │ ├── backtick_in_HTML_comment.snap │ │ │ ├── basic__frontmatter_.snap │ │ │ ├── 
basic__no_frontmatter_.snap │ │ │ ├── basic_renderHead.snap │ │ │ ├── caption_only.snap │ │ │ ├── class_and_class_list_object.snap │ │ │ ├── class_and_class_list_set.snap │ │ │ ├── class_and_class_list_simple_array.snap │ │ │ ├── class_list.snap │ │ │ ├── class_list_with_spread.snap │ │ │ ├── class_with_spread.snap │ │ │ ├── client_only_component__default_.snap │ │ │ ├── client_only_component__multiple_.snap │ │ │ ├── client_only_component__named_.snap │ │ │ ├── client_only_component__namespace_.snap │ │ │ ├── client_only_component__namespaced_default_.snap │ │ │ ├── client_only_component__namespaced_named_.snap │ │ │ ├── comment_only_expressions_are_removed_I.snap │ │ │ ├── comment_only_expressions_are_removed_II.snap │ │ │ ├── comment_only_expressions_are_removed_III.snap │ │ │ ├── comments_removed_from_attribute_list.snap │ │ │ ├── complex_nested_template_literal_expression.snap │ │ │ ├── complex_recursive_component.snap │ │ │ ├── complex_table.snap │ │ │ ├── component.snap │ │ │ ├── component_in_expression_with_its_child_expression_before_its_child_element.snap │ │ │ ├── component_with_only_a_script.snap │ │ │ ├── component_with_quoted_attributes.snap │ │ │ ├── condition_expressions_at_the_top-level.snap │ │ │ ├── condition_expressions_at_the_top-level_with_head_content.snap │ │ │ ├── conditional_iframe.snap │ │ │ ├── conditional_noscript.snap │ │ │ ├── conditional_render.snap │ │ │ ├── conditional_rendering_of_title_containing_expression.snap │ │ │ ├── conditional_slot.snap │ │ │ ├── css_comment_doesn’t_produce_semicolon.snap │ │ │ ├── css_imports_are_not_included_in_module_metadata.snap │ │ │ ├── custom-element.snap │ │ │ ├── custom_elements.snap │ │ │ ├── define_vars_on_a_module_script_with_imports.snap │ │ │ ├── define_vars_on_non-root_elements.snap │ │ │ ├── define_vars_on_script_with_StaticExpression_turned_on.snap │ │ │ ├── define_vars_on_style.snap │ │ │ ├── define_vars_on_style_tag_with_style_empty_attribute_on_element.snap │ │ │ ├── 
define_vars_on_style_tag_with_style_expression_attribute_on_element.snap │ │ │ ├── define_vars_on_style_tag_with_style_quoted_attribute_on_element.snap │ │ │ ├── define_vars_on_style_tag_with_style_shorthand_attribute_on_element.snap │ │ │ ├── define_vars_on_style_tag_with_style_template_literal_attribute_on_element.snap │ │ │ ├── division_inside_expression.snap │ │ │ ├── doctype.snap │ │ │ ├── dot_component.snap │ │ │ ├── dynamic_import.snap │ │ │ ├── escaped_entity.snap │ │ │ ├── export_comments_I.snap │ │ │ ├── export_comments_II.snap │ │ │ ├── export_member_does_not_panic.snap │ │ │ ├── expression_returning_multiple_elements.snap │ │ │ ├── expression_slot.snap │ │ │ ├── expression_with_leading_whitespace.snap │ │ │ ├── expressions_with_JS_comments.snap │ │ │ ├── expressions_with_multiple_curly_braces.snap │ │ │ ├── function_expression_slots_I.snap │ │ │ ├── function_expression_slots_II___959_.snap │ │ │ ├── getStaticPaths__basic_.snap │ │ │ ├── getStaticPaths__hoisted_.snap │ │ │ ├── getStaticPaths__hoisted_II_.snap │ │ │ ├── gets_all_potential_hydrated_components.snap │ │ │ ├── head_content_with_component_first.snap │ │ │ ├── head_expression.snap │ │ │ ├── head_expression_and_conditional_rendering_of_fragment.snap │ │ │ ├── head_inside_slot.snap │ │ │ ├── head_slot.snap │ │ │ ├── head_slot_II.snap │ │ │ ├── head_slot_III.snap │ │ │ ├── html5_boilerplate.snap │ │ │ ├── iframe.snap │ │ │ ├── import.meta.env.snap │ │ │ ├── import.meta.snap │ │ │ ├── import_assertions.snap │ │ │ ├── import_order.snap │ │ │ ├── import_to_identifier_named_assert.snap │ │ │ ├── includes_comments_for_expression_attribute.snap │ │ │ ├── includes_comments_for_shorthand_attribute.snap │ │ │ ├── is_raw.snap │ │ │ ├── jsx_comment_between_doctype_and_html.snap │ │ │ ├── map_basic.snap │ │ │ ├── map_nested.snap │ │ │ ├── map_with_component.snap │ │ │ ├── map_without_component.snap │ │ │ ├── maybeRenderHead_not_printed_for_hoisted_scripts.snap │ │ │ ├── multibyte_character___script.snap │ │ │ 
├── multibyte_character___style.snap │ │ │ ├── multibyte_characters.snap │ │ │ ├── multiline_class_attribute_on_component.snap │ │ │ ├── multiple_define_vars_on_style.snap │ │ │ ├── namespace_is_preserved_when_inside_an_expression.snap │ │ │ ├── nested_expressions.snap │ │ │ ├── nested_expressions_II.snap │ │ │ ├── nested_expressions_III.snap │ │ │ ├── nested_expressions_IV.snap │ │ │ ├── nested_expressions_V.snap │ │ │ ├── nested_expressions_VI.snap │ │ │ ├── nested_expressions_VII.snap │ │ │ ├── nested_expressions_VIII.snap │ │ │ ├── nested_head_content_stays_in_the_head.snap │ │ │ ├── nested_template_literal_expression.snap │ │ │ ├── no_expressions_in_math.snap │ │ │ ├── noscript_component.snap │ │ │ ├── noscript_deep_styles.snap │ │ │ ├── noscript_only.snap │ │ │ ├── noscript_styles.snap │ │ │ ├── orphan_slot.snap │ │ │ ├── passes_escaped_filename_into_createComponent_if_it_contains_single_quotes.snap │ │ │ ├── passes_filename_into_createComponent_if_passed_into_the_compiler_options.snap │ │ │ ├── preserve_is_inline_slot.snap │ │ │ ├── preserve_is_inline_slot_II.snap │ │ │ ├── script.snap │ │ │ ├── script_before_elements.snap │ │ │ ├── script_define_vars_I.snap │ │ │ ├── script_define_vars_II.snap │ │ │ ├── script_external.snap │ │ │ ├── script_external_in_expression.snap │ │ │ ├── script_hoist_with_frontmatter.snap │ │ │ ├── script_hoist_without_frontmatter.snap │ │ │ ├── script_in__head_.snap │ │ │ ├── script_in_expression.snap │ │ │ ├── script_inline.snap │ │ │ ├── script_mixed_handled_and_inline.snap │ │ │ ├── script_multiple.snap │ │ │ ├── scriptinline.snap │ │ │ ├── select_in_form.snap │ │ │ ├── select_map_expression.snap │ │ │ ├── select_nested_option.snap │ │ │ ├── select_option_expression.snap │ │ │ ├── selectedcontent_element_in_customizable_select.snap │ │ │ ├── selectedcontent_self-closing_element.snap │ │ │ ├── self-closing_td.snap │ │ │ ├── set_html.snap │ │ │ ├── set_html_and_set_text.snap │ │ │ ├── set_html_on_Component.snap │ │ │ ├── 
set_html_on_Component_with_quoted_attribute.snap │ │ │ ├── set_html_on_Component_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_Component_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_Fragment.snap │ │ │ ├── set_html_on_Fragment_with_quoted_attribute.snap │ │ │ ├── set_html_on_Fragment_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_Fragment_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_custom-element.snap │ │ │ ├── set_html_on_custom-element_with_quoted_attribute.snap │ │ │ ├── set_html_on_custom-element_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_custom-element_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_empty_tag.snap │ │ │ ├── set_html_on_empty_tag_with_quoted_attribute.snap │ │ │ ├── set_html_on_empty_tag_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_empty_tag_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_script.snap │ │ │ ├── set_html_on_script_with_quoted_attribute.snap │ │ │ ├── set_html_on_script_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_script_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_self-closing_tag.snap │ │ │ ├── set_html_on_self-closing_tag_with_quoted_attribute.snap │ │ │ ├── set_html_on_self-closing_tag_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_self-closing_tag_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_style.snap │ │ │ ├── set_html_on_style_with_quoted_attribute.snap │ │ │ ├── set_html_on_style_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_style_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_tag_with_children.snap │ │ │ ├── set_html_on_tag_with_children_and_quoted_attribute.snap │ │ │ ├── 
set_html_on_tag_with_children_and_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_tag_with_children_and_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_on_tag_with_empty_whitespace.snap │ │ │ ├── set_html_on_tag_with_empty_whitespace_and_quoted_attribute.snap │ │ │ ├── set_html_on_tag_with_empty_whitespace_and_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_on_tag_with_empty_whitespace_and_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_with_other_attributes.snap │ │ │ ├── set_html_with_quoted_attribute.snap │ │ │ ├── set_html_with_quoted_attribute_and_other_attributes.snap │ │ │ ├── set_html_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_html_with_template_literal_attribute_with_variable_and_other_attributes.snap │ │ │ ├── set_html_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_html_with_template_literal_attribute_without_variable_and_other_attributes.snap │ │ │ ├── set_text.snap │ │ │ ├── set_text_on_Component.snap │ │ │ ├── set_text_on_Component_with_quoted_attribute.snap │ │ │ ├── set_text_on_Component_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_text_on_Component_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_text_on_custom-element.snap │ │ │ ├── set_text_on_custom-element_with_quoted_attribute.snap │ │ │ ├── set_text_on_custom-element_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_text_on_custom-element_with_template_literal_attribute_without_variable.snap │ │ │ ├── set_text_with_quoted_attribute.snap │ │ │ ├── set_text_with_template_literal_attribute_with_variable.snap │ │ │ ├── set_text_with_template_literal_attribute_without_variable.snap │ │ │ ├── sibling_expressions.snap │ │ │ ├── simple_ternary.snap │ │ │ ├── slot_inside_of_Base.snap │ │ │ ├── slot_with_fallback.snap │ │ │ ├── slot_with_fallback_II.snap │ │ │ ├── slot_with_fallback_III.snap │ │ │ ├── 
slot_with_quoted_attributes.snap │ │ │ ├── slots__basic_.snap │ │ │ ├── slots__dynamic_name_.snap │ │ │ ├── slots__expression_.snap │ │ │ ├── slots__named_only_.snap │ │ │ ├── slots__no_comments_.snap │ │ │ ├── small_expression.snap │ │ │ ├── solidus_in_template_literal_expression.snap │ │ │ ├── spread_with_double_quotation_marks.snap │ │ │ ├── spread_with_style_but_no_explicit_class.snap │ │ │ ├── spread_without_style_or_class.snap │ │ │ ├── styles__no_frontmatter_.snap │ │ │ ├── svg_expressions.snap │ │ │ ├── table.snap │ │ │ ├── table_II.snap │ │ │ ├── table_III.snap │ │ │ ├── table_IV.snap │ │ │ ├── table_caption_expression.snap │ │ │ ├── table_expression_with_trailing_div.snap │ │ │ ├── table_expressions__no_implicit_tbody_.snap │ │ │ ├── table_simple_case.snap │ │ │ ├── table_slot_I.snap │ │ │ ├── table_slot_II.snap │ │ │ ├── table_slot_III.snap │ │ │ ├── table_slot_IV.snap │ │ │ ├── table_slot_V.snap │ │ │ ├── table_with_expression_in__th_.snap │ │ │ ├── tbody_expressions.snap │ │ │ ├── tbody_expressions_2.snap │ │ │ ├── tbody_expressions_3.snap │ │ │ ├── td_expressions.snap │ │ │ ├── td_expressions_II.snap │ │ │ ├── template_literal_attribute_on_component.snap │ │ │ ├── template_literal_attribute_with_variable_on_component.snap │ │ │ ├── ternary_component.snap │ │ │ ├── ternary_layout.snap │ │ │ ├── ternary_slot.snap │ │ │ ├── text_after_title_expression.snap │ │ │ ├── text_after_title_expressions.snap │ │ │ ├── text_only.snap │ │ │ ├── textarea.snap │ │ │ ├── textarea_in_form.snap │ │ │ ├── textarea_inside_expression.snap │ │ │ ├── th_expressions.snap │ │ │ ├── tr_only.snap │ │ │ ├── trailing_expression.snap │ │ │ ├── transition_animate_on_Component.snap │ │ │ ├── transition_animate_with_an_expression.snap │ │ │ ├── transition_name_with_an_expression.snap │ │ │ ├── transition_name_with_an_template_literal.snap │ │ │ ├── transition_persist-props_converted_to_a_data_attribute.snap │ │ │ ├── transition_persist_converted_to_a_data_attribute.snap │ │ │ ├── 
transition_persist_uses_transition_name_if_defined.snap │ │ │ ├── type_import.snap │ │ │ ├── unusual_line_terminator_I.snap │ │ │ ├── unusual_line_terminator_II.snap │ │ │ └── user-defined__implicit__is_printed.snap │ │ ├── __printer_json__/ │ │ │ ├── Comment.snap │ │ │ ├── Comment_preserves_whitespace.snap │ │ │ ├── Component.snap │ │ │ ├── Doctype.snap │ │ │ ├── Fragment_Literal.snap │ │ │ ├── Fragment_Shorthand.snap │ │ │ ├── Frontmatter.snap │ │ │ ├── JSON_escape.snap │ │ │ ├── Preserve_namespaces.snap │ │ │ ├── basic.snap │ │ │ ├── custom-element.snap │ │ │ ├── element_with_unterminated_double_quote_attribute.snap │ │ │ ├── element_with_unterminated_single_quote_attribute.snap │ │ │ ├── element_with_unterminated_template_literal_attribute.snap │ │ │ ├── expression.snap │ │ │ ├── jsx_comment_between_doctype_and_html.snap │ │ │ ├── style_after_body_with_component_in_head_and_body.snap │ │ │ ├── style_after_empty_html.snap │ │ │ ├── style_after_html.snap │ │ │ ├── style_after_html_with_component_in_head.snap │ │ │ ├── style_after_html_with_component_in_head_and_body.snap │ │ │ ├── style_before_html.snap │ │ │ ├── style_in_body.snap │ │ │ └── style_in_html.snap │ │ ├── print-css.go │ │ ├── print-to-js.go │ │ ├── print-to-json.go │ │ ├── print-to-tsx.go │ │ ├── print-to-tsx_test.go │ │ ├── printer.go │ │ ├── printer_css_test.go │ │ ├── printer_test.go │ │ └── utils.go │ ├── sourcemap/ │ │ └── sourcemap.go │ ├── t/ │ │ └── t.go │ ├── test_utils/ │ │ └── test_utils.go │ ├── token.go │ ├── token_test.go │ ├── transform/ │ │ ├── scope-css.go │ │ ├── scope-css_test.go │ │ ├── scope-html.go │ │ ├── scope-html_test.go │ │ ├── transform.go │ │ ├── transform_test.go │ │ └── utils.go │ └── xxhash/ │ ├── LICENSE.txt │ ├── xxhash.go │ └── xxhash_other.go ├── internal_wasm/ │ └── utils/ │ └── utils.go ├── lib/ │ └── esbuild/ │ ├── LICENSE.md │ ├── ast/ │ │ └── ast.go │ ├── compat/ │ │ ├── compat.go │ │ ├── css_table.go │ │ └── js_table.go │ ├── config/ │ │ ├── config.go │ │ └── 
globals.go │ ├── css_ast/ │ │ ├── css_ast.go │ │ └── css_decl_table.go │ ├── css_lexer/ │ │ ├── css_lexer.go │ │ └── css_lexer_test.go │ ├── css_parser/ │ │ ├── css_decls.go │ │ ├── css_decls_border_radius.go │ │ ├── css_decls_box.go │ │ ├── css_decls_box_shadow.go │ │ ├── css_decls_color.go │ │ ├── css_decls_font.go │ │ ├── css_decls_font_family.go │ │ ├── css_decls_font_weight.go │ │ ├── css_decls_transform.go │ │ ├── css_parser.go │ │ ├── css_parser_selector.go │ │ ├── css_parser_test.go │ │ └── css_reduce_calc.go │ ├── css_printer/ │ │ ├── astro_features.go │ │ ├── css_printer.go │ │ └── css_printer_test.go │ ├── esbuild.go │ ├── helpers/ │ │ ├── bitset.go │ │ ├── comment.go │ │ ├── hash.go │ │ ├── joiner.go │ │ ├── mime.go │ │ ├── path.go │ │ ├── serializer.go │ │ ├── stack.go │ │ ├── timer.go │ │ ├── typos.go │ │ └── utf.go │ ├── logger/ │ │ ├── logger.go │ │ ├── logger_darwin.go │ │ ├── logger_linux.go │ │ ├── logger_other.go │ │ └── logger_windows.go │ ├── sourcemap/ │ │ └── sourcemap.go │ └── test/ │ ├── diff.go │ └── util.go ├── package.json ├── packages/ │ └── compiler/ │ ├── .gitignore │ ├── CHANGELOG.md │ ├── README.md │ ├── package.json │ ├── src/ │ │ ├── browser/ │ │ │ ├── index.ts │ │ │ ├── utils.ts │ │ │ └── wasm_exec.ts │ │ ├── node/ │ │ │ ├── index.ts │ │ │ ├── sync.ts │ │ │ ├── utils.ts │ │ │ └── wasm_exec.ts │ │ └── shared/ │ │ ├── ast.ts │ │ ├── diagnostics.ts │ │ └── types.ts │ ├── sync.d.ts │ ├── test/ │ │ ├── bad-styles/ │ │ │ ├── sass.ts │ │ │ └── unclosed-style.ts │ │ ├── basic/ │ │ │ ├── body-after-head-component.ts │ │ │ ├── body-expression.ts │ │ │ ├── comment.ts │ │ │ ├── component-metadata/ │ │ │ │ └── index.ts │ │ │ ├── component-name.ts │ │ │ ├── export.ts │ │ │ ├── expression-then-node.ts │ │ │ ├── expressions.ts │ │ │ ├── fragment.ts │ │ │ ├── get-static-paths.ts │ │ │ ├── head-injection.ts │ │ │ ├── lt-gt-text.ts │ │ │ ├── null-chars.ts │ │ │ ├── props-interface.ts │ │ │ ├── script-before-html.ts │ │ │ ├── script-fragment.ts │ │ 
│ ├── top-level-expressions.ts │ │ │ ├── trailing-newline.ts │ │ │ ├── trailing-space.ts │ │ │ └── trailing-spaces-ii.ts │ │ ├── client-directive/ │ │ │ ├── special-characters.ts │ │ │ └── warn.ts │ │ ├── compact/ │ │ │ └── minify.ts │ │ ├── css-order/ │ │ │ ├── astro-styles.ts │ │ │ └── imported-styles.ts │ │ ├── errors/ │ │ │ ├── client-only-unfound.ts │ │ │ ├── define-vars.ts │ │ │ ├── fragment-shorthand.ts │ │ │ ├── html-comment.ts │ │ │ ├── invalid-spread.ts │ │ │ ├── jsx-comment.ts │ │ │ └── missing-frontmatter-fence.ts │ │ ├── head-metadata/ │ │ │ ├── with-head.ts │ │ │ └── without-head.ts │ │ ├── js-sourcemaps/ │ │ │ ├── complex-frontmatter.ts │ │ │ ├── deprecated.ts │ │ │ ├── error.ts │ │ │ ├── frontmatter.ts │ │ │ ├── hover.ts │ │ │ ├── module.ts │ │ │ ├── script.ts │ │ │ ├── template.ts │ │ │ └── windows-linereturns.ts │ │ ├── parse/ │ │ │ ├── ast.ts │ │ │ ├── client-component-unfound.ts │ │ │ ├── escaping.ts │ │ │ ├── fragment.ts │ │ │ ├── literal.ts │ │ │ ├── multibyte-characters.ts │ │ │ ├── orphan-head.ts │ │ │ ├── orphan-slot.ts │ │ │ ├── position.ts │ │ │ └── serialize.ts │ │ ├── resolve-path/ │ │ │ └── preserve.ts │ │ ├── scope/ │ │ │ └── same-source.ts │ │ ├── scripts/ │ │ │ ├── isinline-hint.ts │ │ │ └── order.ts │ │ ├── server-islands/ │ │ │ └── meta.ts │ │ ├── slot-result/ │ │ │ └── result.ts │ │ ├── static-extraction/ │ │ │ ├── css.ts │ │ │ └── hoist-expression.ts │ │ ├── stress/ │ │ │ └── index.ts │ │ ├── styles/ │ │ │ ├── define-vars.ts │ │ │ ├── emit-scope.ts │ │ │ ├── empty-style.ts │ │ │ ├── hash.ts │ │ │ └── sass.ts │ │ ├── table/ │ │ │ ├── components.ts │ │ │ ├── expressions.ts │ │ │ └── in-expression.ts │ │ ├── teardown/ │ │ │ └── parse.ts │ │ ├── transition/ │ │ │ ├── data-astro.ts │ │ │ └── meta.ts │ │ ├── tsx/ │ │ │ ├── basic.ts │ │ │ ├── comment-whitespace.ts │ │ │ ├── complex-generics.ts │ │ │ ├── escape.ts │ │ │ ├── line-terminator.ts │ │ │ ├── literal-style-tag.ts │ │ │ ├── meta.ts │ │ │ ├── nested-generics.ts │ │ │ ├── 
non-latin.ts │ │ │ ├── props-and-getStaticPaths.ts │ │ │ ├── props.ts │ │ │ ├── raw.ts │ │ │ ├── script.ts │ │ │ └── top-level-returns.ts │ │ ├── tsx-errors/ │ │ │ ├── eof.ts │ │ │ ├── fragment-shorthand.ts │ │ │ └── unfinished-component.ts │ │ ├── tsx-sourcemaps/ │ │ │ ├── 404.ts │ │ │ ├── attributes.ts │ │ │ ├── deprecated.ts │ │ │ ├── error.ts │ │ │ ├── frontmatter.ts │ │ │ ├── hover.ts │ │ │ ├── module.ts │ │ │ ├── multibyte.ts │ │ │ ├── script.ts │ │ │ ├── tags.ts │ │ │ ├── template-windows.ts │ │ │ ├── template.ts │ │ │ └── unfinished-literal.ts │ │ └── utils.ts │ ├── tsconfig.json │ ├── tsup.config.ts │ ├── types.d.ts │ └── utils.d.ts └── pnpm-workspace.yaml ================================================ FILE CONTENTS ================================================ ================================================ FILE: .changeset/README.md ================================================ # Changesets Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works with multi-package repos, or single-package repos to help you version and publish your code. 
You can find the full documentation for it [in our repository](https://github.com/changesets/changesets) We have a quick list of common questions to get you started engaging with this project in [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) ================================================ FILE: .changeset/config.json ================================================ { "$schema": "https://unpkg.com/@changesets/config@1.6.1/schema.json", "changelog": "@changesets/cli/changelog", "commit": false, "linked": [], "access": "public", "baseBranch": "next", "updateInternalDependencies": "patch", "ignore": [] } ================================================ FILE: .devcontainer/Dockerfile ================================================ # See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.195.0/containers/go/.devcontainer/base.Dockerfile # [Choice] Go version (use -bullseye variants on local arm64/Apple Silicon): 1, 1.16, 1.17, 1-bullseye, 1.16-bullseye, 1.17-bullseye, 1-buster, 1.16-buster, 1.17-buster ARG VARIANT=1-bullseye FROM mcr.microsoft.com/devcontainers/go:0-${VARIANT} # [Choice] Node.js version: lts/*, 16, 14, 12, 10 ARG NODE_VERSION="lts/*" RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c ". /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi # [Optional] Uncomment this section to install additional OS packages. # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ # && apt-get -y install --no-install-recommends # [Optional] Uncomment the next line to use go get to install anything else you need # RUN go get -x # [Optional] Uncomment this line to install global node packages. 
RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g pnpm" 2>&1 ================================================ FILE: .devcontainer/devcontainer.json ================================================ // For format details, see https://aka.ms/vscode-remote/devcontainer.json or this file's README at: // https://github.com/microsoft/vscode-dev-containers/tree/v0.195.0/containers/go { "name": "Go", "build": { "dockerfile": "Dockerfile", "args": { // Update the VARIANT arg to pick a version of Go: 1, 1.16, 1.17 // Append -bullseye or -buster to pin to an OS version. // Use -bullseye variants on local arm64/Apple Silicon. "VARIANT": "1.19", // Options "NODE_VERSION": "16" } }, "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], // Configure tool-specific properties. "customizations": { "vscode": { "settings": { "go.toolsManagement.checkForUpdates": "local", "go.useLanguageServer": true, "go.gopath": "/go", "go.goroot": "/usr/local/go" }, "extensions": ["golang.Go"] } }, "remoteUser": "vscode" } ================================================ FILE: .editorconfig ================================================ root = true [*.go] indent_style = tab indent_size = 2 trim_trailing_whitespace = true ================================================ FILE: .git-blame-ignore-revs ================================================ # chore: format everything (#1020) 402060270f50fe273d5e7387241d7eb36f99ca11 ================================================ FILE: .gitattributes ================================================ # Files with the language manually specified, sorted alphabetically. 
/internal/**/*.snap linguist-language=Markdown ================================================ FILE: .github/ISSUE_TEMPLATE/---01-bug-report.yml ================================================ name: "\U0001F41B Bug Report" description: Report an issue or possible bug title: "\U0001F41B BUG:" labels: [] assignees: [] body: - type: markdown attributes: value: | ## Quick Checklist Thank you for taking the time to file a bug report! Please fill out this form as completely as possible. ✅ I am using the **latest version of Astro** and all plugins. ✅ I am using a version of Node that supports ESM (`v12.20.0+`, `v14.13.1+`, or `v16.0.0+`) - type: input attributes: label: What version of `@astrojs/compiler` are you using? placeholder: 0.0.0 validations: required: true - type: input attributes: label: What package manager are you using? placeholder: npm, yarn, pnpm validations: required: true - type: input attributes: label: What operating system are you using? placeholder: Mac, Windows, Linux validations: required: true - type: textarea attributes: label: Describe the Bug description: A clear and concise description of what the bug is. validations: required: true - type: input attributes: label: Link to Minimal Reproducible Example description: 'Please use the following link to create a reproduction: https://astro.new' placeholder: 'https://stackblitz.com/abcd1234' validations: required: true ================================================ FILE: .github/ISSUE_TEMPLATE/config.yml ================================================ blank_issues_enabled: false contact_links: - name: 🏡 Main Astro Repo url: https://github.com/withastro/astro about: Unsure if your bug is related to the compiler? Please open an issue in the main Astro repo! - name: 👾 Chat url: https://astro.build/chat about: Our Discord server is active, come join us! - name: 💁 Support url: https://astro.build/chat about: 'This issue tracker is not for support questions. Join us on Discord for assistance!' 
================================================ FILE: .github/PULL_REQUEST_TEMPLATE.md ================================================ ## Changes - What does this change? - Be short and concise. Bullet points can help! - Before/after screenshots can be helpful as well. ## Testing ## Docs ================================================ FILE: .github/workflows/ci.yml ================================================ name: Test on: workflow_dispatch: push: branches: ['main', 'next'] pull_request: branches: ['main', 'next'] # Automatically cancel in-progress actions on the same branch concurrency: group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.ref }} cancel-in-progress: true jobs: test: timeout-minutes: 3 runs-on: ubuntu-latest steps: - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Set up Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 with: go-version: 1.22 - name: Test run: go test -v -timeout 30s ./internal/... 
test-wasm: timeout-minutes: 10 strategy: matrix: OS: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.OS }} steps: - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Set up Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 with: go-version: 1.22 - name: Set up PNPM uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 - name: Set up Node.js uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 24 cache: pnpm - name: Build WASM run: make wasm - name: Install NPM Dependencies run: pnpm install env: CI: true - name: Build JS run: pnpm run build:compiler - name: Test WASM run: pnpm test:ci lint: timeout-minutes: 3 runs-on: ubuntu-latest steps: - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: golangci-lint uses: golangci/golangci-lint-action@d6238b002a20823d52840fda27e2d4891c5952dc # v4.0.1 with: version: latest env: GOOS: js GOARCH: wasm lint-js: timeout-minutes: 3 runs-on: ubuntu-latest steps: - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 - name: Setup Biome uses: biomejs/setup-biome@454fa0d884737805f48d7dc236c1761a0ac3cc13 # v2.6.0 - name: Run linting run: biome ci --diagnostic-level=warn ================================================ FILE: .github/workflows/congrats.yml ================================================ name: Congratsbot on: push: branches: [main] jobs: congrats: if: ${{ github.repository_owner == 'withastro' }} uses: withastro/automation/.github/workflows/congratsbot.yml@main with: EMOJIS: '⚙️,🔩,🔧,🛠️,🧰,🗜️,🦺,<:lgtm:1121889033602215966>' secrets: DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_CONGRATS }} ================================================ FILE: .github/workflows/issue-labeled.yml ================================================ name: Issue Labeled on: issues: types: [labeled] jobs: reply_labelled: if: github.repository_owner == 'withastro' uses: 
withastro/automation/.github/workflows/issue-labeled.yml@main ================================================ FILE: .github/workflows/issue-needs-repro.yml ================================================ name: Close Issues (needs repro) on: schedule: - cron: "0 0 * * *" jobs: close-issues: if: github.repository == 'withastro/compiler' runs-on: ubuntu-latest steps: - name: needs repro uses: actions-cool/issues-helper@9861779a695cf1898bd984c727f685f351cfc372 # v3.7.2 with: actions: "close-issues" token: ${{ secrets.GITHUB_TOKEN }} labels: "needs repro" inactive-day: 3 ================================================ FILE: .github/workflows/issue-opened.yml ================================================ name: Label issues on: issues: types: - reopened - opened jobs: label_issues: if: github.repository_owner == 'withastro' uses: withastro/automation/.github/workflows/issue-opened.yml@main ================================================ FILE: .github/workflows/release.yml ================================================ name: Release permissions: {} on: # We trigger the release job when a commit is pushed to these branches push: branches: [main, next] # We trigger the snapshot-release job for comments on PRs issue_comment: types: [created] jobs: release: timeout-minutes: 3 if: github.repository_owner == 'withastro' && github.event_name == 'push' name: Changelog runs-on: ubuntu-latest permissions: contents: write pull-requests: write id-token: write steps: - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false - name: Set up Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 with: go-version: 1.22 - name: Set up PNPM uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 - name: Set up Node.js uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 24.11.1 cache: pnpm - name: Build WASM run: make wasm - name: Install NPM 
Dependencies run: pnpm install - name: Build JS run: pnpm run build:compiler - name: Create Release Pull Request or Publish to npm uses: changesets/action@e0145edc7d9d8679003495b11f87bd8ef63c0cba # v1.5.3 with: publish: pnpm run release commit: "[ci] release" title: "[ci] release" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} NPM_TOKEN: "" # See https://github.com/changesets/changesets/issues/1152#issuecomment-3190884868 snapshot-release: timeout-minutes: 3 name: Create a snapshot release of a pull request if: github.repository_owner == 'withastro' && github.event_name == 'issue_comment' && github.event.issue.pull_request && startsWith(github.event.comment.body, '!preview') runs-on: ubuntu-latest permissions: contents: write pull-requests: write id-token: write defaults: run: shell: bash steps: - name: Check if user has admin access (only admins can publish snapshot releases). id: checkAccess uses: actions-cool/check-user-permission@7b90a27f92f3961b368376107661682c441f6103 # v2.3.0 with: require: admin username: ${{ github.triggering_actor }} # if the user does not have the required permission, we should return exit code 1 to stop the workflow - name: Check user permission if: steps.checkAccess.outputs.require-result == 'false' run: | echo "${{ github.triggering_actor }} does not have permissions on this repo." echo "Current permission level is ${{ steps.checkAccess.outputs.user-permission }}" echo "Job originally triggered by ${{ github.actor }}" exit 1 - name: Extract the snapshot name from comment body id: getSnapshotName uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 with: script: | const splitComment = context.payload.comment.body.split(' '); splitComment.length !== 2 && (github.rest.issues.createComment({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, body: 'Invalid comment format. Expected: "!preview "', }) || core.setFailed('Invalid comment format. 
Expected: "!preview "')); return splitComment[1].trim(); result-encoding: string - name: resolve pr refs id: refs uses: eficode/resolve-pr-refs@f7e14e739786aae2053e162c678cd4c3c2edaa83 # v0.0.4 with: token: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1 with: persist-credentials: false ref: ${{ steps.refs.outputs.head_ref }} - name: Set up Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 with: go-version: 1.22 - name: Setup PNPM uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 - name: Setup Node uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 24.11.1 cache: pnpm - name: Install dependencies run: pnpm install - name: Build Packages run: pnpm run build:all - name: Bump Package Versions run: | pnpm exec changeset version --snapshot ${{ steps.getSnapshotName.outputs.result }} > changesets.output.txt 2>&1 echo ::set-output name=result::`cat changesets.output.txt` env: # Needs access to run the script GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Publish Release id: publish run: | pnpm run release --tag next--${{ steps.getSnapshotName.outputs.result }} > publish.output.txt 2>&1 echo ::set-output name=result::`cat publish.output.txt` env: NPM_TOKEN: "" # See https://github.com/changesets/changesets/issues/1152#issuecomment-3190884868 - name: Pull Request Notification uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 env: MESSAGE: ${{ steps.publish.outputs.result }} with: script: | console.log(process.env.MESSAGE); github.rest.issues.createComment({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, body: '```\n' + process.env.MESSAGE + '\n```', }) ================================================ FILE: .gitignore ================================================ .DS_Store .pnpm-store node_modules *.wasm /astro debug.test __debug_bin 
packages/compiler/sourcemap.mjs ================================================ FILE: .gitpod.yml ================================================ # This configuration file was automatically generated by Gitpod. # Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file) # and commit this file to your remote git repository to share the goodness with others. tasks: - init: pnpm install && pnpm run build && go get && go build ./... && go test ./... && make command: go run . ================================================ FILE: .golangci.yml ================================================ issues: exclude-dirs: - lib exclude-files: - xxhash.go exclude-rules: - path: token.go linters: - errcheck - path: escape.go linters: - errcheck - linters: - staticcheck text: "SA9003" ================================================ FILE: .prettierignore ================================================ **/dist pnpm-lock.yaml ================================================ FILE: .vscode/settings.json ================================================ { "go.toolsEnvVars": { "GOOS": "js", "GOARCH": "wasm" }, "editor.unusualLineTerminators": "off" } ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing Contributions are welcome to the Go compiler! ## Setup ### Go [Go][go] `1.20+` is needed to work with this repo. On Macs, installing via [Homebrew][homebrew] is recommended: `brew install go`. For Windows & Linux, you can [follow Go’s installation guide][go] if you don’t have your own preferred method of package installation. If you use VS Code as your primary editor, installing the [Go extension][go-vscode] is highly recommended. ### Node You will also need [Node.js][node] installed, as well as PNPM 8.x (`npm i -g pnpm`). More often than not, you won’t need to touch JS in this repo, but in case you do, be sure to run `pnpm install` first. 
## Code Structure A simple explanation of the compiler process is: 1. Tokenizes (`internal/token.go`) 2. Scans (`internal/js_scanner.go`) 3. Prints (`internal/printer/print-to-js.go`) **Tokenizing** takes the raw `.astro` text and turns it into simple tokens such as `FrontmatterStart`, `FrontmatterEnd`, `TagStart`, `TagEnd`, etc. **Scanning** does a basic scanning of the JS to pull out imports after the tokenizer has made it clear where JS begins and ends. **Printing** takes all the output up till now and generates (prints) valid TypeScript that can be executed within Node. When adding a new feature or debugging an issue, start at the tokenizer, then move onto the scanner, and finally end at the printer. By starting at the lowest level of complexity (tokenizer), it will be easier to reason about. ## Tests It's important to **run the test from the root of the project**. Doing so, `go` will load all the necessary global information needed to run the tests. ### Run all tests ```shell go test -v ./internal/... ``` ### Run a specific test suite ```shell go test -v ./internal/printer ``` ### Run a specific test case Many of our test cases are designed like this: ```go func TestPrintToJSON(t *testing.T) { tests := []jsonTestcase{ { name: "basic", source: `

Hello world!

`, want: []ASTNode{{Type: "element", Name: "h1", Children: []ASTNode{{Type: "text", Value: "Hello world!"}}}}, }, { name: "Comment preserves whitespace", source: ``, want: []ASTNode{{Type: "comment", Value: " hello "}}, } } } ``` In this particular instance, the test case name is the name of the function, a slash `/`, followed by the `name` field. If the test case has spaces, you can use them. ```shell go test -v ./internal/... -run TestPrintToJSON/basic go test -v ./internal/... -run TestPrintToJSON/Comment preserves whitespace ``` #### Snapshot testing We use [go-snaps](https://github.com/gkampitakis/go-snaps) for snapshot testing. Visit their repository for more details on how to use it. #### Update snapshots Some of our tests use snapshot tests. If some of your changes are expected to update some snapshot tests, you can use the environment variable `UPDATE_SNAPS` to do so: ```shell UPDATE_SNAPS=true go test -v ./internal/... ``` Instead, if there are some **obsolete snapshots**, you can use `UPDATE_SNAPS=clean`: ```shell UPDATE_SNAPS=clean go test -v ./internal/... ``` ### Adding new test cases The printer tests emit only snapshots. Go to `printer_test.go` and add a new test case: ```go { name: "New name for this test" code: "
" } ``` Then run the below command, and a new snapshot named `new_name_for_this_test.snap` should appear in the snapshot folder. ```shell go test -v ./internal/printer/printer_test.go ``` Other tests, like tokenizer and scanner be found in `internal/token_test.go`, `internal/js_scanner_test.go` and respectively. Those tests don't emit any snapshot, and you'll have to add a `want` field: ```go { name: "New name for this test" code: "
", want: want{ code: "
" } } ``` [homebrew]: https://brew.sh/ [go]: https://golang.org/ [go-vscode]: https://marketplace.visualstudio.com/items?itemName=golang.go [node]: https://nodejs.org/ ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2021 [Astro contributors](https://github.com/withastro/compiler/graphs/contributors) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ This license applies to parts of the `internal/` subdirectory originating from the https://cs.opensource.google/go/x/net/+/master:html/ repository: Copyright (c) 2009 The Go Authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ================================================ FILE: Makefile ================================================ GO_FLAGS += "-ldflags=-s -w" # Avoid embedding the build path in the executable for more reproducible builds GO_FLAGS += -trimpath wasm: internal/*/*.go go.mod CGO_ENABLED=0 GOOS=js GOARCH=wasm go build $(GO_FLAGS) -o ./packages/compiler/wasm/astro.wasm ./cmd/astro-wasm/astro-wasm.go publish-node: make wasm cd packages/compiler && pnpm run build clean: git clean -dxf ================================================ FILE: SYNTAX_SPEC.md ================================================ # The `.astro` File Format - Syntax Specification **Version:** 1.0 **Status:** Draft **Date:** 2026-02-03 --- ## Table of Contents 1. [File Structure](#1-file-structure) 2. [Component Script (Frontmatter)](#2-component-script-frontmatter) 3. 
[Template](#3-template) 4. [Style Blocks](#4-style-blocks) 5. [Script Blocks](#5-script-blocks) --- ## 1. File Structure An `.astro` file is composed of up to two sections described below. All are optional. When present, they must appear in this order: ``` ┌──────────────────────────────────┐ │ --- │ │ Component Script │ │ --- │ ├──────────────────────────────────┤ │ Template │ └──────────────────────────────────┘ ``` ### 1.1 Minimal examples ```astro

Hello, World!

``` ```astro --- const greeting = "Hello"; ---

{greeting}, World!

``` --- ## 2. Component Script (Frontmatter) The region between the two `---` fences. - The opening and closing fences are not required on their own line. Code may appear on the same line as both fences. - Only one component script is allowed per file. - Any amount of whitespace may appear before the opening fence or after the closing fence. - Any content may appear before the opening fence, but is customarily ignored. The component script is TypeScript. All standard TypeScript syntax is valid, apart from the exceptions and additions outlined in §2.1. ### 2.1 Top-level return `return` may be used at the top level: ```astro --- import { getUser } from "../lib/auth.js"; const user = await getUser(); if (!user) { return Astro.redirect("/login"); } --- ``` --- ## 3. Template The template is considered to be everything after the closing fence of the component script, or the entire file when there is no component script. The template mostly follows the [JSX specification](https://facebook.github.io/jsx/), with the differences and additions outlined in §3.2. ### 3.1 Whitespace between the component script and template is ignored Any amount of whitespace (spaces, tabs, newlines) between the closing fence of the component script and the start of the template is ignored and does not produce text nodes. ```astro --- const greeting = "Hello"; ---

{greeting}, World!

``` ### 3.2 Differences from JSX Unless mentioned otherwise, these differences apply both within the template and within expressions inside the template. #### HTML comments HTML comments `` are allowed. ```astro { } ``` #### In addition to the standard JSX fragment syntax `<>…`, `Fragment` is supported as the tag name for fragments, i.e. ``. ```astro
Item 1
Item 2
``` This form may accept attributes, unlike the shorthand syntax. #### `is:raw` Directive The `is:raw` attribute on any element allows the content to be treated as raw text instead of JSX. ```astro {<% non-JS content %>}
{not an expression, just text}
``` #### HTML doctype The [HTML doctype declaration](https://html.spec.whatwg.org/multipage/syntax.html#the-doctype) is allowed. ```astro ``` ##### Top-level text nodes Top-level text inside the template is treated as text nodes. ```astro Hello, World! ``` #### Whitespace in expressions Whitespace inside expressions `{ }` is preserved as text nodes, unlike JSX, where whitespace inside expression containers is ignored: ```astro {
Hello
} {
test
} { } ``` In Astro, all of these produce text nodes for the whitespace, whereas in JSX: - Whitespace around elements inside `{ }` is ignored - Whitespace-only expressions result in an empty expression, with no text nodes. #### Multiple root elements Unlike JSX, no single root element is required: ```astro
{
1
2
3
} ``` #### Attribute names Attribute names [follow the HTML conventions](https://html.spec.whatwg.org/multipage/syntax.html#syntax-attribute-name) and are not required to be valid JavaScript identifiers. For example, characters like hyphens and colons are allowed in attribute names: ```astro
``` #### Namespace in component names is not supported Colons in component names are not treated as namespace separators. For example: ```astro ``` Will be treated as a single component name (i.e. `Namespace:Component`). Spaces are not allowed in component names, so the following: ```astro ``` Would result in the component's name being Namespace, an attribute named `:` with no value, and an attribute named `Component` with no value. #### Attribute shorthand Attributes can use a shorthand syntax where `{prop}` is equivalent to `prop={prop}`: ```astro ``` #### Template literal attributes Attributes can use backticks for interpolation without opening an expression: ```astro ``` #### Empty expressions inside attributes Empty expressions `{}` inside attributes are allowed: ```astro ``` #### Comments inside opening tags Comments are allowed inside the opening tag of an element: ```astro
``` #### Less-than signs in text nodes Less-than signs `<` in text nodes are parsed following HTML rules, meaning they do not need to be escaped: ```astro

5 < 10

``` #### Non-ASCII tag names are not supported Tag names must use ASCII characters only. Non-ASCII tag names (e.g. `<日本>`) are not supported and are treated as text nodes. #### Unquoted attribute values Attribute values do not need to be quoted if they contain only alphanumeric characters, hyphens, underscores, and periods: ```astro ``` #### Unclosed HTML tags Like HTML, tags do not need to be explicitly closed. ```astro

Hello

World ``` It is up to the parser to optionally try to infer where tags close based on HTML parsing rules, or leave them unclosed. ##### Void elements HTML void elements do not need to be self-closed: ```astro
``` #### Element-specific parsing rules Certain HTML elements have special parsing rules that differ from the general rules outlined above. These include: - ` ``` Multiple ` ``` If any attributes are present, the content instead follows standard [HTML ` ``` ================================================ FILE: biome.json ================================================ { "$schema": "https://biomejs.dev/schemas/1.8.1/schema.json", "files": { "ignore": ["**/dist/**", "**/pnpm-lock.yaml", "wasm_exec.ts"], "include": ["packages/**"] }, "formatter": { "enabled": true, "indentStyle": "tab", "indentWidth": 2, "lineWidth": 100 }, "organizeImports": { "enabled": true }, "linter": { "enabled": true, "rules": { "recommended": true, "suspicious": { "noExplicitAny": "info", "noConsoleLog": "info" }, "style": { "useTemplate": { "level": "error", "fix": "safe" }, "noUnusedTemplateLiteral": { "level": "error", "fix": "safe" } } } }, "javascript": { "formatter": { "trailingCommas": "es5", "quoteStyle": "single", "semicolons": "always" } }, "json": { "parser": { "allowComments": true, "allowTrailingCommas": true }, "formatter": { "indentStyle": "space", "trailingCommas": "none" } }, "overrides": [ { "include": ["**/stress/**"], "linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } } }, { "include": ["package.json"], "json": { "formatter": { "lineWidth": 1 } } } ] } ================================================ FILE: cmd/astro-wasm/astro-wasm.go ================================================ //go:build js && wasm package main import ( "encoding/base64" "encoding/json" "fmt" "strings" "sync" "syscall/js" "unicode" "github.com/norunners/vert" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/printer" "github.com/withastro/compiler/internal/sourcemap" t "github.com/withastro/compiler/internal/t" 
"github.com/withastro/compiler/internal/transform" wasm_utils "github.com/withastro/compiler/internal_wasm/utils" ) func main() { js.Global().Set("@astrojs/compiler", js.ValueOf(make(map[string]interface{}))) module := js.Global().Get("@astrojs/compiler") module.Set("transform", Transform()) module.Set("parse", Parse()) module.Set("convertToTSX", ConvertToTSX()) <-make(chan struct{}) } func jsString(j js.Value) string { if j.Equal(js.Undefined()) || j.Equal(js.Null()) { return "" } return j.String() } func jsBoolOptional(j js.Value, defaultValue bool) bool { if j.Equal(js.Undefined()) || j.Equal(js.Null()) { return defaultValue } return j.Bool() } func jsBool(j js.Value) bool { if j.Equal(js.Undefined()) || j.Equal(js.Null()) { return false } return j.Bool() } func makeParseOptions(options js.Value) t.ParseOptions { position := true pos := options.Get("position") if !pos.IsNull() && !pos.IsUndefined() { position = pos.Bool() } filename := jsString(options.Get("filename")) if filename == "" { filename = "" } return t.ParseOptions{ Filename: filename, Position: position, } } func makeTransformOptions(options js.Value) transform.TransformOptions { filename := jsString(options.Get("filename")) if filename == "" { filename = "" } normalizedFilename := jsString(options.Get("normalizedFilename")) if normalizedFilename == "" { normalizedFilename = filename } internalURL := jsString(options.Get("internalURL")) if internalURL == "" { internalURL = "astro/runtime/server/index.js" } sourcemap := jsString(options.Get("sourcemap")) if sourcemap == "" { sourcemap = "both" } astroGlobalArgs := jsString(options.Get("astroGlobalArgs")) compact := false if jsBool(options.Get("compact")) { compact = true } scopedSlot := false if jsBool(options.Get("resultScopedSlot")) { scopedSlot = true } transitionsAnimationURL := jsString(options.Get("transitionsAnimationURL")) if transitionsAnimationURL == "" { transitionsAnimationURL = "astro/components/viewtransitions.css" } annotateSourceFile 
:= false if jsBool(options.Get("annotateSourceFile")) { annotateSourceFile = true } var resolvePath any = options.Get("resolvePath") var resolvePathFn func(string) string if resolvePath.(js.Value).Type() == js.TypeFunction { resolvePathFn = func(id string) string { result, _ := wasm_utils.Await(resolvePath.(js.Value).Invoke(id)) if result[0].Equal(js.Undefined()) || result[0].Equal(js.Null()) { return id } else { return result[0].String() } } } preprocessStyle := options.Get("preprocessStyle") scopedStyleStrategy := jsString(options.Get("scopedStyleStrategy")) if scopedStyleStrategy == "" { scopedStyleStrategy = "where" } return transform.TransformOptions{ Filename: filename, NormalizedFilename: normalizedFilename, InternalURL: internalURL, SourceMap: sourcemap, AstroGlobalArgs: astroGlobalArgs, Compact: compact, ResolvePath: resolvePathFn, PreprocessStyle: preprocessStyle, ResultScopedSlot: scopedSlot, ScopedStyleStrategy: scopedStyleStrategy, TransitionsAnimationURL: transitionsAnimationURL, AnnotateSourceFile: annotateSourceFile, } } func makeTSXOptions(options js.Value) printer.TSXOptions { includeScripts := jsBoolOptional(options.Get("includeScripts"), true) includeStyles := jsBoolOptional(options.Get("includeStyles"), true) return printer.TSXOptions{ IncludeScripts: includeScripts, IncludeStyles: includeStyles, } } type RawSourceMap struct { File string `js:"file"` Mappings string `js:"mappings"` Names []string `js:"names"` Sources []string `js:"sources"` SourcesContent []string `js:"sourcesContent"` Version int `js:"version"` } type HoistedScript struct { Code string `js:"code"` Src string `js:"src"` Type string `js:"type"` Map string `js:"map"` } type HydratedComponent struct { ExportName string `js:"exportName"` LocalName string `js:"localName"` Specifier string `js:"specifier"` ResolvedPath string `js:"resolvedPath"` } type ParseResult struct { AST string `js:"ast"` Diagnostics []loc.DiagnosticMessage `js:"diagnostics"` } type TSXResult struct { Code 
string `js:"code"` Map string `js:"map"` Diagnostics []loc.DiagnosticMessage `js:"diagnostics"` Ranges printer.TSXRanges `js:"metaRanges"` } type TransformResult struct { Code string `js:"code"` Diagnostics []loc.DiagnosticMessage `js:"diagnostics"` Map string `js:"map"` Scope string `js:"scope"` CSS []string `js:"css"` Scripts []HoistedScript `js:"scripts"` HydratedComponents []HydratedComponent `js:"hydratedComponents"` ClientOnlyComponents []HydratedComponent `js:"clientOnlyComponents"` ServerComponents []HydratedComponent `js:"serverComponents"` ContainsHead bool `js:"containsHead"` StyleError []string `js:"styleError"` Propagation bool `js:"propagation"` } // This is spawned as a goroutine to preprocess style nodes using an async function passed from JS func preprocessStyle(i int, style *astro.Node, transformOptions transform.TransformOptions, styleError *[]string, cb func()) { defer cb() if style.FirstChild == nil { return } attrs := wasm_utils.GetAttrs(style) data, _ := wasm_utils.Await(transformOptions.PreprocessStyle.(js.Value).Invoke(style.FirstChild.Data, attrs)) // note: Rollup (and by extension our Astro Vite plugin) allows for "undefined" and "null" responses if a transform wishes to skip this occurrence if data[0].Equal(js.Undefined()) || data[0].Equal(js.Null()) { return } // If an error return, override the style's CSS so the compiler doesn't hang // And return a styleError. The caller will use this to know that style processing failed. 
if err := jsString(data[0].Get("error")); err != "" { style.FirstChild.Data = "" //*styleError = err *styleError = append(*styleError, err) return } str := jsString(data[0].Get("code")) if str == "" { return } style.FirstChild.Data = str } func Parse() any { return js.FuncOf(func(this js.Value, args []js.Value) any { source := jsString(args[0]) parseOptions := makeParseOptions(js.Value(args[1])) transformOptions := makeTransformOptions(js.Value(args[1])) transformOptions.Scope = "xxxxxx" h := handler.NewHandler(source, parseOptions.Filename) var doc *astro.Node doc, err := astro.ParseWithOptions(strings.NewReader(source), astro.ParseOptionWithHandler(h), astro.ParseOptionEnableLiteral(true)) if err != nil { h.AppendError(err) } result := printer.PrintToJSON(source, doc, parseOptions) // AFTER printing, exec transformations to pickup any errors/warnings transform.Transform(doc, transformOptions, h) return vert.ValueOf(ParseResult{ AST: string(result.Output), Diagnostics: h.Diagnostics(), }).Value }) } func ConvertToTSX() any { return js.FuncOf(func(this js.Value, args []js.Value) any { source := jsString(args[0]) transformOptions := makeTransformOptions(js.Value(args[1])) transformOptions.Scope = "xxxxxx" h := handler.NewHandler(source, transformOptions.Filename) var doc *astro.Node doc, err := astro.ParseWithOptions(strings.NewReader(source), astro.ParseOptionWithHandler(h), astro.ParseOptionEnableLiteral(true)) if err != nil { h.AppendError(err) } tsxOptions := makeTSXOptions(js.Value(args[1])) result := printer.PrintToTSX(source, doc, tsxOptions, transformOptions, h) // AFTER printing, exec transformations to pickup any errors/warnings transform.Transform(doc, transformOptions, h) sourcemapString := createSourceMapString(source, result, transformOptions) code := string(result.Output) if transformOptions.SourceMap != "external" { inlineSourcemap := `//# sourceMappingURL=data:application/json;charset=utf-8;base64,` + 
base64.StdEncoding.EncodeToString([]byte(sourcemapString)) code += "\n" + inlineSourcemap } return vert.ValueOf(TSXResult{ Code: code, Map: sourcemapString, Diagnostics: h.Diagnostics(), Ranges: result.TSXRanges, }).Value }) } func Transform() any { return js.FuncOf(func(this js.Value, args []js.Value) any { source := strings.TrimRightFunc(jsString(args[0]), unicode.IsSpace) transformOptions := makeTransformOptions(js.Value(args[1])) scopeStr := transformOptions.NormalizedFilename if scopeStr == "" { scopeStr = source } transformOptions.Scope = astro.HashString(scopeStr) h := handler.NewHandler(source, transformOptions.Filename) styleError := []string{} promiseHandle := js.FuncOf(func(this js.Value, args []js.Value) any { resolve := args[0] reject := args[1] go func() { var doc *astro.Node defer func() { if err := recover(); err != nil { reject.Invoke(wasm_utils.ErrorToJSError(h, err.(error))) return } }() doc, err := astro.ParseWithOptions(strings.NewReader(source), astro.ParseOptionWithHandler(h)) if err != nil { reject.Invoke(wasm_utils.ErrorToJSError(h, err)) return } // Hoist styles and scripts to the top-level transform.ExtractStyles(doc, &transformOptions) // Pre-process styles // Important! 
These goroutines need to be spawned from this file or they don't work var wg sync.WaitGroup if len(doc.Styles) > 0 { if transformOptions.PreprocessStyle.(js.Value).Type() == js.TypeFunction { for i, style := range doc.Styles { wg.Add(1) i := i go preprocessStyle(i, style, transformOptions, &styleError, wg.Done) } } } // Wait for all the style goroutines to finish wg.Wait() // Perform CSS and element scoping as needed transform.Transform(doc, transformOptions, h) css := []string{} scripts := []HoistedScript{} hydratedComponents := []HydratedComponent{} clientOnlyComponents := []HydratedComponent{} serverComponents := []HydratedComponent{} css_result := printer.PrintCSS(source, doc, transformOptions) for _, bytes := range css_result.Output { css = append(css, string(bytes)) } // Append hoisted scripts for _, node := range doc.Scripts { src := astro.GetAttribute(node, "src") script := HoistedScript{ Src: "", Code: "", Type: "", Map: "", } if src != nil { script.Type = "external" script.Src = src.Val } else if node.FirstChild != nil { script.Type = "inline" if transformOptions.SourceMap != "" { isLine := func(r rune) bool { return r == '\r' || r == '\n' } isNotLine := func(r rune) bool { return !(r == '\r' || r == '\n') } output := make([]byte, 0) builder := sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(source, len(strings.Split(source, "\n")))) sourcesContent, _ := json.Marshal(source) if len(node.FirstChild.Loc) > 0 { i := node.FirstChild.Loc[0].Start nonWS := strings.IndexFunc(node.FirstChild.Data, isNotLine) i += nonWS for _, ln := range strings.Split(strings.TrimFunc(node.FirstChild.Data, isLine), "\n") { content := []byte(ln) content = append(content, '\n') for j, b := range content { if j == 0 || !unicode.IsSpace(rune(b)) { builder.AddSourceMapping(loc.Loc{Start: i}, output) } output = append(output, b) i += 1 } } output = append(output, '\n') } else { output = append(output, []byte(strings.TrimSpace(node.FirstChild.Data))...) 
} sourcemap := fmt.Sprintf( `{ "version": 3, "sources": ["%s"], "sourcesContent": [%s], "mappings": "%s", "names": [] }`, transformOptions.Filename, string(sourcesContent), string(builder.GenerateChunk(output).Buffer), ) script.Map = sourcemap script.Code = string(output) } else { script.Code = node.FirstChild.Data } } // sourcemapString := createSourceMapString(source, result, transformOptions) // inlineSourcemap := `//# sourceMappingURL=data:application/json;charset=utf-8;base64,` + base64.StdEncoding.EncodeToString([]byte(sourcemapString)) scripts = append(scripts, script) } for _, c := range doc.HydratedComponents { hydratedComponents = append(hydratedComponents, HydratedComponent{ ExportName: c.ExportName, Specifier: c.Specifier, ResolvedPath: c.ResolvedPath, }) } for _, c := range doc.ClientOnlyComponents { clientOnlyComponents = append(clientOnlyComponents, HydratedComponent{ ExportName: c.ExportName, Specifier: c.Specifier, ResolvedPath: c.ResolvedPath, }) } for _, c := range doc.ServerComponents { serverComponents = append(serverComponents, HydratedComponent{ ExportName: c.ExportName, LocalName: c.LocalName, Specifier: c.Specifier, ResolvedPath: c.ResolvedPath, }) } var value vert.Value result := printer.PrintToJS(source, doc, len(css), transformOptions, h) transformResult := &TransformResult{ CSS: css, Scope: transformOptions.Scope, Scripts: scripts, HydratedComponents: hydratedComponents, ClientOnlyComponents: clientOnlyComponents, ServerComponents: serverComponents, ContainsHead: doc.ContainsHead, StyleError: styleError, Propagation: doc.HeadPropagation, } switch transformOptions.SourceMap { case "external": value = createExternalSourceMap(source, transformResult, result, transformOptions) case "both": value = createBothSourceMap(source, transformResult, result, transformOptions) case "inline": value = createInlineSourceMap(source, transformResult, result, transformOptions) default: transformResult.Code = string(result.Output) transformResult.Map = "" 
value = vert.ValueOf(transformResult) } value.Set("diagnostics", vert.ValueOf(h.Diagnostics()).Value) resolve.Invoke(value.Value) }() return nil }) defer promiseHandle.Release() // Create and return the Promise object promiseConstructor := js.Global().Get("Promise") return promiseConstructor.New(promiseHandle) }) } func createSourceMapString(source string, result printer.PrintResult, transformOptions transform.TransformOptions) string { sourcesContent, _ := json.Marshal(source) sourcemap := RawSourceMap{ Version: 3, Sources: []string{transformOptions.Filename}, SourcesContent: []string{string(sourcesContent)}, Mappings: string(result.SourceMapChunk.Buffer), } return fmt.Sprintf(`{ "version": 3, "sources": ["%s"], "sourcesContent": [%s], "mappings": "%s", "names": [] }`, sourcemap.Sources[0], sourcemap.SourcesContent[0], sourcemap.Mappings) } func createExternalSourceMap(source string, transformResult *TransformResult, result printer.PrintResult, transformOptions transform.TransformOptions) vert.Value { transformResult.Code = string(result.Output) transformResult.Map = createSourceMapString(source, result, transformOptions) return vert.ValueOf(transformResult) } func createInlineSourceMap(source string, transformResult *TransformResult, result printer.PrintResult, transformOptions transform.TransformOptions) vert.Value { sourcemapString := createSourceMapString(source, result, transformOptions) inlineSourcemap := `//# sourceMappingURL=data:application/json;charset=utf-8;base64,` + base64.StdEncoding.EncodeToString([]byte(sourcemapString)) transformResult.Code = string(result.Output) + "\n" + inlineSourcemap transformResult.Map = "" return vert.ValueOf(transformResult) } func createBothSourceMap(source string, transformResult *TransformResult, result printer.PrintResult, transformOptions transform.TransformOptions) vert.Value { sourcemapString := createSourceMapString(source, result, transformOptions) inlineSourcemap := `//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,` + base64.StdEncoding.EncodeToString([]byte(sourcemapString)) transformResult.Code = string(result.Output) + "\n" + inlineSourcemap transformResult.Map = sourcemapString return vert.ValueOf(transformResult) } ================================================ FILE: go.mod ================================================ module github.com/withastro/compiler go 1.21 require ( github.com/gkampitakis/go-snaps v0.5.2 github.com/google/go-cmp v0.5.9 github.com/iancoleman/strcase v0.2.0 github.com/lithammer/dedent v1.1.0 github.com/norunners/vert v0.0.0-20221203075838-106a353d42dd github.com/tdewolff/parse/v2 v2.6.4 golang.org/x/net v0.0.0-20221004154528-8021a29435af golang.org/x/sys v0.0.0-20221010170243-090e33056c14 ) require ( github.com/gkampitakis/ciinfo v0.3.0 // indirect github.com/gkampitakis/go-diff v1.3.2 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/maruel/natural v1.1.1 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/tidwall/gjson v1.17.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect ) ================================================ FILE: go.sum ================================================ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/gkampitakis/ciinfo v0.3.0 h1:gWZlOC2+RYYttL0hBqcoQhM7h1qNkVqvRCV1fOvpAv8= github.com/gkampitakis/ciinfo v0.3.0/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= github.com/gkampitakis/go-snaps v0.5.2 h1:ay/6f7WHwRkOgpBec9DjMLRBAApziJommZ21NkOOCwY= github.com/gkampitakis/go-snaps v0.5.2/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y= 
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY= github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc= github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= github.com/norunners/vert v0.0.0-20221203075838-106a353d42dd h1:tHn7K76q9eJ2rXLH/OoxHkdprM3l2A+0kdxOrKYcV7U= github.com/norunners/vert v0.0.0-20221203075838-106a353d42dd/go.mod h1:8iuQLyTSvuzwy6R6l6w6J+i9c/6xPEVoVdcMz9E8FEw= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/tdewolff/parse/v2 v2.6.4 h1:KCkDvNUMof10e3QExio9OPZJT8SbdKojLBumw8YZycQ= github.com/tdewolff/parse/v2 v2.6.4/go.mod h1:woz0cgbLwFdtbjJu8PIKxhW05KplTFQkOdX78o+Jgrs= github.com/tdewolff/test v1.0.7 h1:8Vs0142DmPFW/bQeHRP3MV19m1gvndjUb1sn8yy74LM= github.com/tdewolff/test v1.0.7/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tidwall/gjson v1.14.2/go.mod 
h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM=
github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
golang.org/x/net v0.0.0-20221004154528-8021a29435af h1:wv66FM3rLZGPdxpYL+ApnDe2HzHcTFta3z5nsc13wI4=
golang.org/x/net v0.0.0-20221004154528-8021a29435af/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/sys v0.0.0-20221010170243-090e33056c14 h1:k5II8e6QD8mITdi+okbbmR/cIyEbeXLBhy5Ha4nevyc=
golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=


================================================
FILE: internal/const.go
================================================
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package astro

import a "golang.org/x/net/html/atom"

// Section 12.2.4.2 of the HTML5 specification says "The following elements
// have varying levels of special parsing rules".
// https://html.spec.whatwg.org/multipage/syntax.html#the-stack-of-open-elements
var isSpecialElementMap = map[string]bool{
	"address":    true,
	"applet":     true,
	"area":       true,
	"article":    true,
	"aside":      true,
	"base":       true,
	"basefont":   true,
	"bgsound":    true,
	"blockquote": true,
	"body":       true,
	"br":         true,
	"button":     true,
	"caption":    true,
	"center":     true,
	"col":        true,
	"colgroup":   true,
	"dd":         true,
	"details":    true,
	"dir":        true,
	"div":        true,
	"dl":         true,
	"dt":         true,
	"embed":      true,
	"fieldset":   true,
	"figcaption": true,
	"figure":     true,
	"footer":     true,
	"form":       true,
	"frame":      true,
	"frameset":   true,
	"h1":         true,
	"h2":         true,
	"h3":         true,
	"h4":         true,
	"h5":         true,
	"h6":         true,
	"head":       true,
	"header":     true,
	"hgroup":     true,
	"hr":         true,
	"html":       true,
	"iframe":     true,
	"img":        true,
	"input":      true,
	"keygen":     true, // "keygen" has been removed from the spec, but are kept here for backwards compatibility.
	"li":         true,
	"link":       true,
	"listing":    true,
	"main":       true,
	"marquee":    true,
	"menu":       true,
	"meta":       true,
	"nav":        true,
	"noembed":    true,
	"noframes":   true,
	"noscript":   true,
	"object":     true,
	"ol":         true,
	"p":          true,
	"param":      true,
	"plaintext":  true,
	"pre":        true,
	"script":     true,
	"section":    true,
	"select":     true,
	"source":     true,
	"style":      true,
	"summary":    true,
	"table":      true,
	"tbody":      true,
	"td":         true,
	"template":   true,
	"textarea":   true,
	"tfoot":      true,
	"th":         true,
	"thead":      true,
	"title":      true,
	"tr":         true,
	"track":      true,
	"ul":         true,
	"wbr":        true,
	"xmp":        true,
}

// isSpecialElement reports whether element has "special" parsing rules for
// its namespace: HTML elements are looked up in isSpecialElementMap, while
// a fixed set of MathML and SVG elements are matched by name.
func isSpecialElement(element *Node) bool {
	switch element.Namespace {
	case "", "html":
		return isSpecialElementMap[element.Data]
	case "math":
		switch element.Data {
		case "mi", "mo", "mn", "ms", "mtext", "annotation-xml":
			return true
		}
	case "svg":
		switch element.Data {
		case "foreignObject", "desc", "title":
			return true
		}
	}
	return false
}

// knownDirectiveMap lists the Astro attribute directives that are recognized
// on any element (client hydration directives plus class:list / set:*).
var knownDirectiveMap = map[string]bool{
	"client:load":    true,
	"client:idle":    true,
	"client:visible": true,
	"client:only":    true,
	"class:list":     true,
	"set:text":       true,
	"set:html":       true,
}

// IsKnownDirective reports whether attr is a directive the compiler
// understands on element: any entry in knownDirectiveMap, or the
// element-specific directives `hoist` on <script> and `global` on <style>.
func IsKnownDirective(element *Node, attr *Attribute) bool {
	if knownDirectiveMap[attr.Key] {
		return true
	}
	if element.DataAtom == a.Script {
		return attr.Key == "hoist"
	}
	if element.DataAtom == a.Style {
		return attr.Key == "global"
	}
	return false
}


================================================
FILE: internal/doc.go
================================================
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This example demonstrates parsing HTML data and walking the resulting tree.
package astro


================================================
FILE: internal/doctype.go
================================================
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package astro

import (
	"strings"
)

// parseDoctype parses the data from a DoctypeToken into a name,
// public identifier, and system identifier. It returns a Node whose Type
// is DoctypeNode, whose Data is the name, and which has attributes
// named "system" and "public" for the two identifiers if they were present.
// quirks is whether the document should be parsed in "quirks mode".
func parseDoctype(s string) (n *Node, quirks bool) {
	n = &Node{Type: DoctypeNode}

	// Find the name.
	space := strings.IndexAny(s, whitespace)
	if space == -1 {
		space = len(s)
	}
	n.Data = s[:space]
	// The comparison to "html" is case-sensitive.
	if n.Data != "html" {
		quirks = true
	}
	n.Data = strings.ToLower(n.Data)
	s = strings.TrimLeft(s[space:], whitespace)

	if len(s) < 6 {
		// It can't start with "PUBLIC" or "SYSTEM".
		// Ignore the rest of the string.
		return n, quirks || s != ""
	}

	key := strings.ToLower(s[:6])
	s = s[6:]
	// Parse the PUBLIC and/or SYSTEM identifiers, each delimited by a
	// matching single or double quote. An unterminated identifier consumes
	// the rest of the string.
	for key == "public" || key == "system" {
		s = strings.TrimLeft(s, whitespace)
		if s == "" {
			break
		}
		quote := s[0]
		if quote != '"' && quote != '\'' {
			break
		}
		s = s[1:]
		q := strings.IndexRune(s, rune(quote))
		var id string
		if q == -1 {
			id = s
			s = ""
		} else {
			id = s[:q]
			s = s[q+1:]
		}
		n.Attr = append(n.Attr, Attribute{Key: key, Val: id})
		if key == "public" {
			// A PUBLIC identifier may be followed by a SYSTEM identifier.
			key = "system"
		} else {
			key = ""
		}
	}

	if key != "" || s != "" {
		// A dangling keyword or trailing garbage triggers quirks mode.
		quirks = true
	} else if len(n.Attr) > 0 {
		if n.Attr[0].Key == "public" {
			public := strings.ToLower(n.Attr[0].Val)
			switch public {
			case "-//w3o//dtd w3 html strict 3.0//en//", "-/w3d/dtd html 4.0 transitional/en", "html":
				quirks = true
			default:
				for _, q := range quirkyIDs {
					if strings.HasPrefix(public, q) {
						quirks = true
						break
					}
				}
			}
			// The following two public IDs only cause quirks mode if there is no system ID.
			if len(n.Attr) == 1 && (strings.HasPrefix(public, "-//w3c//dtd html 4.01 frameset//") ||
				strings.HasPrefix(public, "-//w3c//dtd html 4.01 transitional//")) {
				quirks = true
			}
		}
		if lastAttr := n.Attr[len(n.Attr)-1]; lastAttr.Key == "system" &&
			strings.ToLower(lastAttr.Val) == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd" {
			quirks = true
		}
	}

	return n, quirks
}

// quirkyIDs is a list of public doctype identifiers that cause a document
// to be interpreted in quirks mode. The identifiers should be in lower case.
var quirkyIDs = []string{
	"+//silmaril//dtd html pro v0r11 19970101//",
	"-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
	"-//as//dtd html 3.0 aswedit + extensions//",
	"-//ietf//dtd html 2.0 level 1//",
	"-//ietf//dtd html 2.0 level 2//",
	"-//ietf//dtd html 2.0 strict level 1//",
	"-//ietf//dtd html 2.0 strict level 2//",
	"-//ietf//dtd html 2.0 strict//",
	"-//ietf//dtd html 2.0//",
	"-//ietf//dtd html 2.1e//",
	"-//ietf//dtd html 3.0//",
	"-//ietf//dtd html 3.2 final//",
	"-//ietf//dtd html 3.2//",
	"-//ietf//dtd html 3//",
	"-//ietf//dtd html level 0//",
	"-//ietf//dtd html level 1//",
	"-//ietf//dtd html level 2//",
	"-//ietf//dtd html level 3//",
	"-//ietf//dtd html strict level 0//",
	"-//ietf//dtd html strict level 1//",
	"-//ietf//dtd html strict level 2//",
	"-//ietf//dtd html strict level 3//",
	"-//ietf//dtd html strict//",
	"-//ietf//dtd html//",
	"-//metrius//dtd metrius presentational//",
	"-//microsoft//dtd internet explorer 2.0 html strict//",
	"-//microsoft//dtd internet explorer 2.0 html//",
	"-//microsoft//dtd internet explorer 2.0 tables//",
	"-//microsoft//dtd internet explorer 3.0 html strict//",
	"-//microsoft//dtd internet explorer 3.0 html//",
	"-//microsoft//dtd internet explorer 3.0 tables//",
	"-//netscape comm. corp.//dtd html//",
	"-//netscape comm. corp.//dtd strict html//",
	"-//o'reilly and associates//dtd html 2.0//",
	"-//o'reilly and associates//dtd html extended 1.0//",
	"-//o'reilly and associates//dtd html extended relaxed 1.0//",
	"-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
	"-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
	"-//spyglass//dtd html 2.0 extended//",
	"-//sq//dtd html 2.0 hotmetal + extensions//",
	"-//sun microsystems corp.//dtd hotjava html//",
	"-//sun microsystems corp.//dtd hotjava strict html//",
	"-//w3c//dtd html 3 1995-03-24//",
	"-//w3c//dtd html 3.2 draft//",
	"-//w3c//dtd html 3.2 final//",
	"-//w3c//dtd html 3.2//",
	"-//w3c//dtd html 3.2s draft//",
	"-//w3c//dtd html 4.0 frameset//",
	"-//w3c//dtd html 4.0 transitional//",
	"-//w3c//dtd html experimental 19960712//",
	"-//w3c//dtd html experimental 970421//",
	"-//w3c//dtd w3 html//",
	"-//w3o//dtd w3 html 3.0//",
	"-//webtechs//dtd mozilla html 2.0//",
	"-//webtechs//dtd mozilla html//",
}


================================================
FILE: internal/entity.go
================================================
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package astro

// All entities that do not end with ';' are 6 or fewer bytes long.
const longestEntityWithoutSemicolon = 6

// entity is a map from HTML entity names to their values. The semicolon matters:
// https://html.spec.whatwg.org/multipage/syntax.html#named-character-references
// lists both "amp" and "amp;" as two separate entries.
// // Note that the HTML5 list is larger than the HTML4 list at // http://www.w3.org/TR/html4/sgml/entities.html var entity = map[string]rune{ "AElig;": '\U000000C6', "AMP;": '\U00000026', "Aacute;": '\U000000C1', "Abreve;": '\U00000102', "Acirc;": '\U000000C2', "Acy;": '\U00000410', "Afr;": '\U0001D504', "Agrave;": '\U000000C0', "Alpha;": '\U00000391', "Amacr;": '\U00000100', "And;": '\U00002A53', "Aogon;": '\U00000104', "Aopf;": '\U0001D538', "ApplyFunction;": '\U00002061', "Aring;": '\U000000C5', "Ascr;": '\U0001D49C', "Assign;": '\U00002254', "Atilde;": '\U000000C3', "Auml;": '\U000000C4', "Backslash;": '\U00002216', "Barv;": '\U00002AE7', "Barwed;": '\U00002306', "Bcy;": '\U00000411', "Because;": '\U00002235', "Bernoullis;": '\U0000212C', "Beta;": '\U00000392', "Bfr;": '\U0001D505', "Bopf;": '\U0001D539', "Breve;": '\U000002D8', "Bscr;": '\U0000212C', "Bumpeq;": '\U0000224E', "CHcy;": '\U00000427', "COPY;": '\U000000A9', "Cacute;": '\U00000106', "Cap;": '\U000022D2', "CapitalDifferentialD;": '\U00002145', "Cayleys;": '\U0000212D', "Ccaron;": '\U0000010C', "Ccedil;": '\U000000C7', "Ccirc;": '\U00000108', "Cconint;": '\U00002230', "Cdot;": '\U0000010A', "Cedilla;": '\U000000B8', "CenterDot;": '\U000000B7', "Cfr;": '\U0000212D', "Chi;": '\U000003A7', "CircleDot;": '\U00002299', "CircleMinus;": '\U00002296', "CirclePlus;": '\U00002295', "CircleTimes;": '\U00002297', "ClockwiseContourIntegral;": '\U00002232', "CloseCurlyDoubleQuote;": '\U0000201D', "CloseCurlyQuote;": '\U00002019', "Colon;": '\U00002237', "Colone;": '\U00002A74', "Congruent;": '\U00002261', "Conint;": '\U0000222F', "ContourIntegral;": '\U0000222E', "Copf;": '\U00002102', "Coproduct;": '\U00002210', "CounterClockwiseContourIntegral;": '\U00002233', "Cross;": '\U00002A2F', "Cscr;": '\U0001D49E', "Cup;": '\U000022D3', "CupCap;": '\U0000224D', "DD;": '\U00002145', "DDotrahd;": '\U00002911', "DJcy;": '\U00000402', "DScy;": '\U00000405', "DZcy;": '\U0000040F', "Dagger;": '\U00002021', "Darr;": 
'\U000021A1', "Dashv;": '\U00002AE4', "Dcaron;": '\U0000010E', "Dcy;": '\U00000414', "Del;": '\U00002207', "Delta;": '\U00000394', "Dfr;": '\U0001D507', "DiacriticalAcute;": '\U000000B4', "DiacriticalDot;": '\U000002D9', "DiacriticalDoubleAcute;": '\U000002DD', "DiacriticalGrave;": '\U00000060', "DiacriticalTilde;": '\U000002DC', "Diamond;": '\U000022C4', "DifferentialD;": '\U00002146', "Dopf;": '\U0001D53B', "Dot;": '\U000000A8', "DotDot;": '\U000020DC', "DotEqual;": '\U00002250', "DoubleContourIntegral;": '\U0000222F', "DoubleDot;": '\U000000A8', "DoubleDownArrow;": '\U000021D3', "DoubleLeftArrow;": '\U000021D0', "DoubleLeftRightArrow;": '\U000021D4', "DoubleLeftTee;": '\U00002AE4', "DoubleLongLeftArrow;": '\U000027F8', "DoubleLongLeftRightArrow;": '\U000027FA', "DoubleLongRightArrow;": '\U000027F9', "DoubleRightArrow;": '\U000021D2', "DoubleRightTee;": '\U000022A8', "DoubleUpArrow;": '\U000021D1', "DoubleUpDownArrow;": '\U000021D5', "DoubleVerticalBar;": '\U00002225', "DownArrow;": '\U00002193', "DownArrowBar;": '\U00002913', "DownArrowUpArrow;": '\U000021F5', "DownBreve;": '\U00000311', "DownLeftRightVector;": '\U00002950', "DownLeftTeeVector;": '\U0000295E', "DownLeftVector;": '\U000021BD', "DownLeftVectorBar;": '\U00002956', "DownRightTeeVector;": '\U0000295F', "DownRightVector;": '\U000021C1', "DownRightVectorBar;": '\U00002957', "DownTee;": '\U000022A4', "DownTeeArrow;": '\U000021A7', "Downarrow;": '\U000021D3', "Dscr;": '\U0001D49F', "Dstrok;": '\U00000110', "ENG;": '\U0000014A', "ETH;": '\U000000D0', "Eacute;": '\U000000C9', "Ecaron;": '\U0000011A', "Ecirc;": '\U000000CA', "Ecy;": '\U0000042D', "Edot;": '\U00000116', "Efr;": '\U0001D508', "Egrave;": '\U000000C8', "Element;": '\U00002208', "Emacr;": '\U00000112', "EmptySmallSquare;": '\U000025FB', "EmptyVerySmallSquare;": '\U000025AB', "Eogon;": '\U00000118', "Eopf;": '\U0001D53C', "Epsilon;": '\U00000395', "Equal;": '\U00002A75', "EqualTilde;": '\U00002242', "Equilibrium;": '\U000021CC', "Escr;": 
'\U00002130', "Esim;": '\U00002A73', "Eta;": '\U00000397', "Euml;": '\U000000CB', "Exists;": '\U00002203', "ExponentialE;": '\U00002147', "Fcy;": '\U00000424', "Ffr;": '\U0001D509', "FilledSmallSquare;": '\U000025FC', "FilledVerySmallSquare;": '\U000025AA', "Fopf;": '\U0001D53D', "ForAll;": '\U00002200', "Fouriertrf;": '\U00002131', "Fscr;": '\U00002131', "GJcy;": '\U00000403', "GT;": '\U0000003E', "Gamma;": '\U00000393', "Gammad;": '\U000003DC', "Gbreve;": '\U0000011E', "Gcedil;": '\U00000122', "Gcirc;": '\U0000011C', "Gcy;": '\U00000413', "Gdot;": '\U00000120', "Gfr;": '\U0001D50A', "Gg;": '\U000022D9', "Gopf;": '\U0001D53E', "GreaterEqual;": '\U00002265', "GreaterEqualLess;": '\U000022DB', "GreaterFullEqual;": '\U00002267', "GreaterGreater;": '\U00002AA2', "GreaterLess;": '\U00002277', "GreaterSlantEqual;": '\U00002A7E', "GreaterTilde;": '\U00002273', "Gscr;": '\U0001D4A2', "Gt;": '\U0000226B', "HARDcy;": '\U0000042A', "Hacek;": '\U000002C7', "Hat;": '\U0000005E', "Hcirc;": '\U00000124', "Hfr;": '\U0000210C', "HilbertSpace;": '\U0000210B', "Hopf;": '\U0000210D', "HorizontalLine;": '\U00002500', "Hscr;": '\U0000210B', "Hstrok;": '\U00000126', "HumpDownHump;": '\U0000224E', "HumpEqual;": '\U0000224F', "IEcy;": '\U00000415', "IJlig;": '\U00000132', "IOcy;": '\U00000401', "Iacute;": '\U000000CD', "Icirc;": '\U000000CE', "Icy;": '\U00000418', "Idot;": '\U00000130', "Ifr;": '\U00002111', "Igrave;": '\U000000CC', "Im;": '\U00002111', "Imacr;": '\U0000012A', "ImaginaryI;": '\U00002148', "Implies;": '\U000021D2', "Int;": '\U0000222C', "Integral;": '\U0000222B', "Intersection;": '\U000022C2', "InvisibleComma;": '\U00002063', "InvisibleTimes;": '\U00002062', "Iogon;": '\U0000012E', "Iopf;": '\U0001D540', "Iota;": '\U00000399', "Iscr;": '\U00002110', "Itilde;": '\U00000128', "Iukcy;": '\U00000406', "Iuml;": '\U000000CF', "Jcirc;": '\U00000134', "Jcy;": '\U00000419', "Jfr;": '\U0001D50D', "Jopf;": '\U0001D541', "Jscr;": '\U0001D4A5', "Jsercy;": '\U00000408', "Jukcy;": 
'\U00000404', "KHcy;": '\U00000425', "KJcy;": '\U0000040C', "Kappa;": '\U0000039A', "Kcedil;": '\U00000136', "Kcy;": '\U0000041A', "Kfr;": '\U0001D50E', "Kopf;": '\U0001D542', "Kscr;": '\U0001D4A6', "LJcy;": '\U00000409', "LT;": '\U0000003C', "Lacute;": '\U00000139', "Lambda;": '\U0000039B', "Lang;": '\U000027EA', "Laplacetrf;": '\U00002112', "Larr;": '\U0000219E', "Lcaron;": '\U0000013D', "Lcedil;": '\U0000013B', "Lcy;": '\U0000041B', "LeftAngleBracket;": '\U000027E8', "LeftArrow;": '\U00002190', "LeftArrowBar;": '\U000021E4', "LeftArrowRightArrow;": '\U000021C6', "LeftCeiling;": '\U00002308', "LeftDoubleBracket;": '\U000027E6', "LeftDownTeeVector;": '\U00002961', "LeftDownVector;": '\U000021C3', "LeftDownVectorBar;": '\U00002959', "LeftFloor;": '\U0000230A', "LeftRightArrow;": '\U00002194', "LeftRightVector;": '\U0000294E', "LeftTee;": '\U000022A3', "LeftTeeArrow;": '\U000021A4', "LeftTeeVector;": '\U0000295A', "LeftTriangle;": '\U000022B2', "LeftTriangleBar;": '\U000029CF', "LeftTriangleEqual;": '\U000022B4', "LeftUpDownVector;": '\U00002951', "LeftUpTeeVector;": '\U00002960', "LeftUpVector;": '\U000021BF', "LeftUpVectorBar;": '\U00002958', "LeftVector;": '\U000021BC', "LeftVectorBar;": '\U00002952', "Leftarrow;": '\U000021D0', "Leftrightarrow;": '\U000021D4', "LessEqualGreater;": '\U000022DA', "LessFullEqual;": '\U00002266', "LessGreater;": '\U00002276', "LessLess;": '\U00002AA1', "LessSlantEqual;": '\U00002A7D', "LessTilde;": '\U00002272', "Lfr;": '\U0001D50F', "Ll;": '\U000022D8', "Lleftarrow;": '\U000021DA', "Lmidot;": '\U0000013F', "LongLeftArrow;": '\U000027F5', "LongLeftRightArrow;": '\U000027F7', "LongRightArrow;": '\U000027F6', "Longleftarrow;": '\U000027F8', "Longleftrightarrow;": '\U000027FA', "Longrightarrow;": '\U000027F9', "Lopf;": '\U0001D543', "LowerLeftArrow;": '\U00002199', "LowerRightArrow;": '\U00002198', "Lscr;": '\U00002112', "Lsh;": '\U000021B0', "Lstrok;": '\U00000141', "Lt;": '\U0000226A', "Map;": '\U00002905', "Mcy;": '\U0000041C', 
"MediumSpace;": '\U0000205F', "Mellintrf;": '\U00002133', "Mfr;": '\U0001D510', "MinusPlus;": '\U00002213', "Mopf;": '\U0001D544', "Mscr;": '\U00002133', "Mu;": '\U0000039C', "NJcy;": '\U0000040A', "Nacute;": '\U00000143', "Ncaron;": '\U00000147', "Ncedil;": '\U00000145', "Ncy;": '\U0000041D', "NegativeMediumSpace;": '\U0000200B', "NegativeThickSpace;": '\U0000200B', "NegativeThinSpace;": '\U0000200B', "NegativeVeryThinSpace;": '\U0000200B', "NestedGreaterGreater;": '\U0000226B', "NestedLessLess;": '\U0000226A', "NewLine;": '\U0000000A', "Nfr;": '\U0001D511', "NoBreak;": '\U00002060', "NonBreakingSpace;": '\U000000A0', "Nopf;": '\U00002115', "Not;": '\U00002AEC', "NotCongruent;": '\U00002262', "NotCupCap;": '\U0000226D', "NotDoubleVerticalBar;": '\U00002226', "NotElement;": '\U00002209', "NotEqual;": '\U00002260', "NotExists;": '\U00002204', "NotGreater;": '\U0000226F', "NotGreaterEqual;": '\U00002271', "NotGreaterLess;": '\U00002279', "NotGreaterTilde;": '\U00002275', "NotLeftTriangle;": '\U000022EA', "NotLeftTriangleEqual;": '\U000022EC', "NotLess;": '\U0000226E', "NotLessEqual;": '\U00002270', "NotLessGreater;": '\U00002278', "NotLessTilde;": '\U00002274', "NotPrecedes;": '\U00002280', "NotPrecedesSlantEqual;": '\U000022E0', "NotReverseElement;": '\U0000220C', "NotRightTriangle;": '\U000022EB', "NotRightTriangleEqual;": '\U000022ED', "NotSquareSubsetEqual;": '\U000022E2', "NotSquareSupersetEqual;": '\U000022E3', "NotSubsetEqual;": '\U00002288', "NotSucceeds;": '\U00002281', "NotSucceedsSlantEqual;": '\U000022E1', "NotSupersetEqual;": '\U00002289', "NotTilde;": '\U00002241', "NotTildeEqual;": '\U00002244', "NotTildeFullEqual;": '\U00002247', "NotTildeTilde;": '\U00002249', "NotVerticalBar;": '\U00002224', "Nscr;": '\U0001D4A9', "Ntilde;": '\U000000D1', "Nu;": '\U0000039D', "OElig;": '\U00000152', "Oacute;": '\U000000D3', "Ocirc;": '\U000000D4', "Ocy;": '\U0000041E', "Odblac;": '\U00000150', "Ofr;": '\U0001D512', "Ograve;": '\U000000D2', "Omacr;": '\U0000014C', 
"Omega;": '\U000003A9', "Omicron;": '\U0000039F', "Oopf;": '\U0001D546', "OpenCurlyDoubleQuote;": '\U0000201C', "OpenCurlyQuote;": '\U00002018', "Or;": '\U00002A54', "Oscr;": '\U0001D4AA', "Oslash;": '\U000000D8', "Otilde;": '\U000000D5', "Otimes;": '\U00002A37', "Ouml;": '\U000000D6', "OverBar;": '\U0000203E', "OverBrace;": '\U000023DE', "OverBracket;": '\U000023B4', "OverParenthesis;": '\U000023DC', "PartialD;": '\U00002202', "Pcy;": '\U0000041F', "Pfr;": '\U0001D513', "Phi;": '\U000003A6', "Pi;": '\U000003A0', "PlusMinus;": '\U000000B1', "Poincareplane;": '\U0000210C', "Popf;": '\U00002119', "Pr;": '\U00002ABB', "Precedes;": '\U0000227A', "PrecedesEqual;": '\U00002AAF', "PrecedesSlantEqual;": '\U0000227C', "PrecedesTilde;": '\U0000227E', "Prime;": '\U00002033', "Product;": '\U0000220F', "Proportion;": '\U00002237', "Proportional;": '\U0000221D', "Pscr;": '\U0001D4AB', "Psi;": '\U000003A8', "QUOT;": '\U00000022', "Qfr;": '\U0001D514', "Qopf;": '\U0000211A', "Qscr;": '\U0001D4AC', "RBarr;": '\U00002910', "REG;": '\U000000AE', "Racute;": '\U00000154', "Rang;": '\U000027EB', "Rarr;": '\U000021A0', "Rarrtl;": '\U00002916', "Rcaron;": '\U00000158', "Rcedil;": '\U00000156', "Rcy;": '\U00000420', "Re;": '\U0000211C', "ReverseElement;": '\U0000220B', "ReverseEquilibrium;": '\U000021CB', "ReverseUpEquilibrium;": '\U0000296F', "Rfr;": '\U0000211C', "Rho;": '\U000003A1', "RightAngleBracket;": '\U000027E9', "RightArrow;": '\U00002192', "RightArrowBar;": '\U000021E5', "RightArrowLeftArrow;": '\U000021C4', "RightCeiling;": '\U00002309', "RightDoubleBracket;": '\U000027E7', "RightDownTeeVector;": '\U0000295D', "RightDownVector;": '\U000021C2', "RightDownVectorBar;": '\U00002955', "RightFloor;": '\U0000230B', "RightTee;": '\U000022A2', "RightTeeArrow;": '\U000021A6', "RightTeeVector;": '\U0000295B', "RightTriangle;": '\U000022B3', "RightTriangleBar;": '\U000029D0', "RightTriangleEqual;": '\U000022B5', "RightUpDownVector;": '\U0000294F', "RightUpTeeVector;": '\U0000295C', 
"RightUpVector;": '\U000021BE', "RightUpVectorBar;": '\U00002954', "RightVector;": '\U000021C0', "RightVectorBar;": '\U00002953', "Rightarrow;": '\U000021D2', "Ropf;": '\U0000211D', "RoundImplies;": '\U00002970', "Rrightarrow;": '\U000021DB', "Rscr;": '\U0000211B', "Rsh;": '\U000021B1', "RuleDelayed;": '\U000029F4', "SHCHcy;": '\U00000429', "SHcy;": '\U00000428', "SOFTcy;": '\U0000042C', "Sacute;": '\U0000015A', "Sc;": '\U00002ABC', "Scaron;": '\U00000160', "Scedil;": '\U0000015E', "Scirc;": '\U0000015C', "Scy;": '\U00000421', "Sfr;": '\U0001D516', "ShortDownArrow;": '\U00002193', "ShortLeftArrow;": '\U00002190', "ShortRightArrow;": '\U00002192', "ShortUpArrow;": '\U00002191', "Sigma;": '\U000003A3', "SmallCircle;": '\U00002218', "Sopf;": '\U0001D54A', "Sqrt;": '\U0000221A', "Square;": '\U000025A1', "SquareIntersection;": '\U00002293', "SquareSubset;": '\U0000228F', "SquareSubsetEqual;": '\U00002291', "SquareSuperset;": '\U00002290', "SquareSupersetEqual;": '\U00002292', "SquareUnion;": '\U00002294', "Sscr;": '\U0001D4AE', "Star;": '\U000022C6', "Sub;": '\U000022D0', "Subset;": '\U000022D0', "SubsetEqual;": '\U00002286', "Succeeds;": '\U0000227B', "SucceedsEqual;": '\U00002AB0', "SucceedsSlantEqual;": '\U0000227D', "SucceedsTilde;": '\U0000227F', "SuchThat;": '\U0000220B', "Sum;": '\U00002211', "Sup;": '\U000022D1', "Superset;": '\U00002283', "SupersetEqual;": '\U00002287', "Supset;": '\U000022D1', "THORN;": '\U000000DE', "TRADE;": '\U00002122', "TSHcy;": '\U0000040B', "TScy;": '\U00000426', "Tab;": '\U00000009', "Tau;": '\U000003A4', "Tcaron;": '\U00000164', "Tcedil;": '\U00000162', "Tcy;": '\U00000422', "Tfr;": '\U0001D517', "Therefore;": '\U00002234', "Theta;": '\U00000398', "ThinSpace;": '\U00002009', "Tilde;": '\U0000223C', "TildeEqual;": '\U00002243', "TildeFullEqual;": '\U00002245', "TildeTilde;": '\U00002248', "Topf;": '\U0001D54B', "TripleDot;": '\U000020DB', "Tscr;": '\U0001D4AF', "Tstrok;": '\U00000166', "Uacute;": '\U000000DA', "Uarr;": '\U0000219F', 
"Uarrocir;": '\U00002949', "Ubrcy;": '\U0000040E', "Ubreve;": '\U0000016C', "Ucirc;": '\U000000DB', "Ucy;": '\U00000423', "Udblac;": '\U00000170', "Ufr;": '\U0001D518', "Ugrave;": '\U000000D9', "Umacr;": '\U0000016A', "UnderBar;": '\U0000005F', "UnderBrace;": '\U000023DF', "UnderBracket;": '\U000023B5', "UnderParenthesis;": '\U000023DD', "Union;": '\U000022C3', "UnionPlus;": '\U0000228E', "Uogon;": '\U00000172', "Uopf;": '\U0001D54C', "UpArrow;": '\U00002191', "UpArrowBar;": '\U00002912', "UpArrowDownArrow;": '\U000021C5', "UpDownArrow;": '\U00002195', "UpEquilibrium;": '\U0000296E', "UpTee;": '\U000022A5', "UpTeeArrow;": '\U000021A5', "Uparrow;": '\U000021D1', "Updownarrow;": '\U000021D5', "UpperLeftArrow;": '\U00002196', "UpperRightArrow;": '\U00002197', "Upsi;": '\U000003D2', "Upsilon;": '\U000003A5', "Uring;": '\U0000016E', "Uscr;": '\U0001D4B0', "Utilde;": '\U00000168', "Uuml;": '\U000000DC', "VDash;": '\U000022AB', "Vbar;": '\U00002AEB', "Vcy;": '\U00000412', "Vdash;": '\U000022A9', "Vdashl;": '\U00002AE6', "Vee;": '\U000022C1', "Verbar;": '\U00002016', "Vert;": '\U00002016', "VerticalBar;": '\U00002223', "VerticalLine;": '\U0000007C', "VerticalSeparator;": '\U00002758', "VerticalTilde;": '\U00002240', "VeryThinSpace;": '\U0000200A', "Vfr;": '\U0001D519', "Vopf;": '\U0001D54D', "Vscr;": '\U0001D4B1', "Vvdash;": '\U000022AA', "Wcirc;": '\U00000174', "Wedge;": '\U000022C0', "Wfr;": '\U0001D51A', "Wopf;": '\U0001D54E', "Wscr;": '\U0001D4B2', "Xfr;": '\U0001D51B', "Xi;": '\U0000039E', "Xopf;": '\U0001D54F', "Xscr;": '\U0001D4B3', "YAcy;": '\U0000042F', "YIcy;": '\U00000407', "YUcy;": '\U0000042E', "Yacute;": '\U000000DD', "Ycirc;": '\U00000176', "Ycy;": '\U0000042B', "Yfr;": '\U0001D51C', "Yopf;": '\U0001D550', "Yscr;": '\U0001D4B4', "Yuml;": '\U00000178', "ZHcy;": '\U00000416', "Zacute;": '\U00000179', "Zcaron;": '\U0000017D', "Zcy;": '\U00000417', "Zdot;": '\U0000017B', "ZeroWidthSpace;": '\U0000200B', "Zeta;": '\U00000396', "Zfr;": '\U00002128', "Zopf;": 
'\U00002124', "Zscr;": '\U0001D4B5', "aacute;": '\U000000E1', "abreve;": '\U00000103', "ac;": '\U0000223E', "acd;": '\U0000223F', "acirc;": '\U000000E2', "acute;": '\U000000B4', "acy;": '\U00000430', "aelig;": '\U000000E6', "af;": '\U00002061', "afr;": '\U0001D51E', "agrave;": '\U000000E0', "alefsym;": '\U00002135', "aleph;": '\U00002135', "alpha;": '\U000003B1', "amacr;": '\U00000101', "amalg;": '\U00002A3F', "amp;": '\U00000026', "and;": '\U00002227', "andand;": '\U00002A55', "andd;": '\U00002A5C', "andslope;": '\U00002A58', "andv;": '\U00002A5A', "ang;": '\U00002220', "ange;": '\U000029A4', "angle;": '\U00002220', "angmsd;": '\U00002221', "angmsdaa;": '\U000029A8', "angmsdab;": '\U000029A9', "angmsdac;": '\U000029AA', "angmsdad;": '\U000029AB', "angmsdae;": '\U000029AC', "angmsdaf;": '\U000029AD', "angmsdag;": '\U000029AE', "angmsdah;": '\U000029AF', "angrt;": '\U0000221F', "angrtvb;": '\U000022BE', "angrtvbd;": '\U0000299D', "angsph;": '\U00002222', "angst;": '\U000000C5', "angzarr;": '\U0000237C', "aogon;": '\U00000105', "aopf;": '\U0001D552', "ap;": '\U00002248', "apE;": '\U00002A70', "apacir;": '\U00002A6F', "ape;": '\U0000224A', "apid;": '\U0000224B', "apos;": '\U00000027', "approx;": '\U00002248', "approxeq;": '\U0000224A', "aring;": '\U000000E5', "ascr;": '\U0001D4B6', "ast;": '\U0000002A', "asymp;": '\U00002248', "asympeq;": '\U0000224D', "atilde;": '\U000000E3', "auml;": '\U000000E4', "awconint;": '\U00002233', "awint;": '\U00002A11', "bNot;": '\U00002AED', "backcong;": '\U0000224C', "backepsilon;": '\U000003F6', "backprime;": '\U00002035', "backsim;": '\U0000223D', "backsimeq;": '\U000022CD', "barvee;": '\U000022BD', "barwed;": '\U00002305', "barwedge;": '\U00002305', "bbrk;": '\U000023B5', "bbrktbrk;": '\U000023B6', "bcong;": '\U0000224C', "bcy;": '\U00000431', "bdquo;": '\U0000201E', "becaus;": '\U00002235', "because;": '\U00002235', "bemptyv;": '\U000029B0', "bepsi;": '\U000003F6', "bernou;": '\U0000212C', "beta;": '\U000003B2', "beth;": 
'\U00002136', "between;": '\U0000226C', "bfr;": '\U0001D51F', "bigcap;": '\U000022C2', "bigcirc;": '\U000025EF', "bigcup;": '\U000022C3', "bigodot;": '\U00002A00', "bigoplus;": '\U00002A01', "bigotimes;": '\U00002A02', "bigsqcup;": '\U00002A06', "bigstar;": '\U00002605', "bigtriangledown;": '\U000025BD', "bigtriangleup;": '\U000025B3', "biguplus;": '\U00002A04', "bigvee;": '\U000022C1', "bigwedge;": '\U000022C0', "bkarow;": '\U0000290D', "blacklozenge;": '\U000029EB', "blacksquare;": '\U000025AA', "blacktriangle;": '\U000025B4', "blacktriangledown;": '\U000025BE', "blacktriangleleft;": '\U000025C2', "blacktriangleright;": '\U000025B8', "blank;": '\U00002423', "blk12;": '\U00002592', "blk14;": '\U00002591', "blk34;": '\U00002593', "block;": '\U00002588', "bnot;": '\U00002310', "bopf;": '\U0001D553', "bot;": '\U000022A5', "bottom;": '\U000022A5', "bowtie;": '\U000022C8', "boxDL;": '\U00002557', "boxDR;": '\U00002554', "boxDl;": '\U00002556', "boxDr;": '\U00002553', "boxH;": '\U00002550', "boxHD;": '\U00002566', "boxHU;": '\U00002569', "boxHd;": '\U00002564', "boxHu;": '\U00002567', "boxUL;": '\U0000255D', "boxUR;": '\U0000255A', "boxUl;": '\U0000255C', "boxUr;": '\U00002559', "boxV;": '\U00002551', "boxVH;": '\U0000256C', "boxVL;": '\U00002563', "boxVR;": '\U00002560', "boxVh;": '\U0000256B', "boxVl;": '\U00002562', "boxVr;": '\U0000255F', "boxbox;": '\U000029C9', "boxdL;": '\U00002555', "boxdR;": '\U00002552', "boxdl;": '\U00002510', "boxdr;": '\U0000250C', "boxh;": '\U00002500', "boxhD;": '\U00002565', "boxhU;": '\U00002568', "boxhd;": '\U0000252C', "boxhu;": '\U00002534', "boxminus;": '\U0000229F', "boxplus;": '\U0000229E', "boxtimes;": '\U000022A0', "boxuL;": '\U0000255B', "boxuR;": '\U00002558', "boxul;": '\U00002518', "boxur;": '\U00002514', "boxv;": '\U00002502', "boxvH;": '\U0000256A', "boxvL;": '\U00002561', "boxvR;": '\U0000255E', "boxvh;": '\U0000253C', "boxvl;": '\U00002524', "boxvr;": '\U0000251C', "bprime;": '\U00002035', "breve;": '\U000002D8', 
"brvbar;": '\U000000A6', "bscr;": '\U0001D4B7', "bsemi;": '\U0000204F', "bsim;": '\U0000223D', "bsime;": '\U000022CD', "bsol;": '\U0000005C', "bsolb;": '\U000029C5', "bsolhsub;": '\U000027C8', "bull;": '\U00002022', "bullet;": '\U00002022', "bump;": '\U0000224E', "bumpE;": '\U00002AAE', "bumpe;": '\U0000224F', "bumpeq;": '\U0000224F', "cacute;": '\U00000107', "cap;": '\U00002229', "capand;": '\U00002A44', "capbrcup;": '\U00002A49', "capcap;": '\U00002A4B', "capcup;": '\U00002A47', "capdot;": '\U00002A40', "caret;": '\U00002041', "caron;": '\U000002C7', "ccaps;": '\U00002A4D', "ccaron;": '\U0000010D', "ccedil;": '\U000000E7', "ccirc;": '\U00000109', "ccups;": '\U00002A4C', "ccupssm;": '\U00002A50', "cdot;": '\U0000010B', "cedil;": '\U000000B8', "cemptyv;": '\U000029B2', "cent;": '\U000000A2', "centerdot;": '\U000000B7', "cfr;": '\U0001D520', "chcy;": '\U00000447', "check;": '\U00002713', "checkmark;": '\U00002713', "chi;": '\U000003C7', "cir;": '\U000025CB', "cirE;": '\U000029C3', "circ;": '\U000002C6', "circeq;": '\U00002257', "circlearrowleft;": '\U000021BA', "circlearrowright;": '\U000021BB', "circledR;": '\U000000AE', "circledS;": '\U000024C8', "circledast;": '\U0000229B', "circledcirc;": '\U0000229A', "circleddash;": '\U0000229D', "cire;": '\U00002257', "cirfnint;": '\U00002A10', "cirmid;": '\U00002AEF', "cirscir;": '\U000029C2', "clubs;": '\U00002663', "clubsuit;": '\U00002663', "colon;": '\U0000003A', "colone;": '\U00002254', "coloneq;": '\U00002254', "comma;": '\U0000002C', "commat;": '\U00000040', "comp;": '\U00002201', "compfn;": '\U00002218', "complement;": '\U00002201', "complexes;": '\U00002102', "cong;": '\U00002245', "congdot;": '\U00002A6D', "conint;": '\U0000222E', "copf;": '\U0001D554', "coprod;": '\U00002210', "copy;": '\U000000A9', "copysr;": '\U00002117', "crarr;": '\U000021B5', "cross;": '\U00002717', "cscr;": '\U0001D4B8', "csub;": '\U00002ACF', "csube;": '\U00002AD1', "csup;": '\U00002AD0', "csupe;": '\U00002AD2', "ctdot;": '\U000022EF', 
"cudarrl;": '\U00002938', "cudarrr;": '\U00002935', "cuepr;": '\U000022DE', "cuesc;": '\U000022DF', "cularr;": '\U000021B6', "cularrp;": '\U0000293D', "cup;": '\U0000222A', "cupbrcap;": '\U00002A48', "cupcap;": '\U00002A46', "cupcup;": '\U00002A4A', "cupdot;": '\U0000228D', "cupor;": '\U00002A45', "curarr;": '\U000021B7', "curarrm;": '\U0000293C', "curlyeqprec;": '\U000022DE', "curlyeqsucc;": '\U000022DF', "curlyvee;": '\U000022CE', "curlywedge;": '\U000022CF', "curren;": '\U000000A4', "curvearrowleft;": '\U000021B6', "curvearrowright;": '\U000021B7', "cuvee;": '\U000022CE', "cuwed;": '\U000022CF', "cwconint;": '\U00002232', "cwint;": '\U00002231', "cylcty;": '\U0000232D', "dArr;": '\U000021D3', "dHar;": '\U00002965', "dagger;": '\U00002020', "daleth;": '\U00002138', "darr;": '\U00002193', "dash;": '\U00002010', "dashv;": '\U000022A3', "dbkarow;": '\U0000290F', "dblac;": '\U000002DD', "dcaron;": '\U0000010F', "dcy;": '\U00000434', "dd;": '\U00002146', "ddagger;": '\U00002021', "ddarr;": '\U000021CA', "ddotseq;": '\U00002A77', "deg;": '\U000000B0', "delta;": '\U000003B4', "demptyv;": '\U000029B1', "dfisht;": '\U0000297F', "dfr;": '\U0001D521', "dharl;": '\U000021C3', "dharr;": '\U000021C2', "diam;": '\U000022C4', "diamond;": '\U000022C4', "diamondsuit;": '\U00002666', "diams;": '\U00002666', "die;": '\U000000A8', "digamma;": '\U000003DD', "disin;": '\U000022F2', "div;": '\U000000F7', "divide;": '\U000000F7', "divideontimes;": '\U000022C7', "divonx;": '\U000022C7', "djcy;": '\U00000452', "dlcorn;": '\U0000231E', "dlcrop;": '\U0000230D', "dollar;": '\U00000024', "dopf;": '\U0001D555', "dot;": '\U000002D9', "doteq;": '\U00002250', "doteqdot;": '\U00002251', "dotminus;": '\U00002238', "dotplus;": '\U00002214', "dotsquare;": '\U000022A1', "doublebarwedge;": '\U00002306', "downarrow;": '\U00002193', "downdownarrows;": '\U000021CA', "downharpoonleft;": '\U000021C3', "downharpoonright;": '\U000021C2', "drbkarow;": '\U00002910', "drcorn;": '\U0000231F', "drcrop;": 
'\U0000230C', "dscr;": '\U0001D4B9', "dscy;": '\U00000455', "dsol;": '\U000029F6', "dstrok;": '\U00000111', "dtdot;": '\U000022F1', "dtri;": '\U000025BF', "dtrif;": '\U000025BE', "duarr;": '\U000021F5', "duhar;": '\U0000296F', "dwangle;": '\U000029A6', "dzcy;": '\U0000045F', "dzigrarr;": '\U000027FF', "eDDot;": '\U00002A77', "eDot;": '\U00002251', "eacute;": '\U000000E9', "easter;": '\U00002A6E', "ecaron;": '\U0000011B', "ecir;": '\U00002256', "ecirc;": '\U000000EA', "ecolon;": '\U00002255', "ecy;": '\U0000044D', "edot;": '\U00000117', "ee;": '\U00002147', "efDot;": '\U00002252', "efr;": '\U0001D522', "eg;": '\U00002A9A', "egrave;": '\U000000E8', "egs;": '\U00002A96', "egsdot;": '\U00002A98', "el;": '\U00002A99', "elinters;": '\U000023E7', "ell;": '\U00002113', "els;": '\U00002A95', "elsdot;": '\U00002A97', "emacr;": '\U00000113', "empty;": '\U00002205', "emptyset;": '\U00002205', "emptyv;": '\U00002205', "emsp;": '\U00002003', "emsp13;": '\U00002004', "emsp14;": '\U00002005', "eng;": '\U0000014B', "ensp;": '\U00002002', "eogon;": '\U00000119', "eopf;": '\U0001D556', "epar;": '\U000022D5', "eparsl;": '\U000029E3', "eplus;": '\U00002A71', "epsi;": '\U000003B5', "epsilon;": '\U000003B5', "epsiv;": '\U000003F5', "eqcirc;": '\U00002256', "eqcolon;": '\U00002255', "eqsim;": '\U00002242', "eqslantgtr;": '\U00002A96', "eqslantless;": '\U00002A95', "equals;": '\U0000003D', "equest;": '\U0000225F', "equiv;": '\U00002261', "equivDD;": '\U00002A78', "eqvparsl;": '\U000029E5', "erDot;": '\U00002253', "erarr;": '\U00002971', "escr;": '\U0000212F', "esdot;": '\U00002250', "esim;": '\U00002242', "eta;": '\U000003B7', "eth;": '\U000000F0', "euml;": '\U000000EB', "euro;": '\U000020AC', "excl;": '\U00000021', "exist;": '\U00002203', "expectation;": '\U00002130', "exponentiale;": '\U00002147', "fallingdotseq;": '\U00002252', "fcy;": '\U00000444', "female;": '\U00002640', "ffilig;": '\U0000FB03', "fflig;": '\U0000FB00', "ffllig;": '\U0000FB04', "ffr;": '\U0001D523', "filig;": 
'\U0000FB01', "flat;": '\U0000266D', "fllig;": '\U0000FB02', "fltns;": '\U000025B1', "fnof;": '\U00000192', "fopf;": '\U0001D557', "forall;": '\U00002200', "fork;": '\U000022D4', "forkv;": '\U00002AD9', "fpartint;": '\U00002A0D', "frac12;": '\U000000BD', "frac13;": '\U00002153', "frac14;": '\U000000BC', "frac15;": '\U00002155', "frac16;": '\U00002159', "frac18;": '\U0000215B', "frac23;": '\U00002154', "frac25;": '\U00002156', "frac34;": '\U000000BE', "frac35;": '\U00002157', "frac38;": '\U0000215C', "frac45;": '\U00002158', "frac56;": '\U0000215A', "frac58;": '\U0000215D', "frac78;": '\U0000215E', "frasl;": '\U00002044', "frown;": '\U00002322', "fscr;": '\U0001D4BB', "gE;": '\U00002267', "gEl;": '\U00002A8C', "gacute;": '\U000001F5', "gamma;": '\U000003B3', "gammad;": '\U000003DD', "gap;": '\U00002A86', "gbreve;": '\U0000011F', "gcirc;": '\U0000011D', "gcy;": '\U00000433', "gdot;": '\U00000121', "ge;": '\U00002265', "gel;": '\U000022DB', "geq;": '\U00002265', "geqq;": '\U00002267', "geqslant;": '\U00002A7E', "ges;": '\U00002A7E', "gescc;": '\U00002AA9', "gesdot;": '\U00002A80', "gesdoto;": '\U00002A82', "gesdotol;": '\U00002A84', "gesles;": '\U00002A94', "gfr;": '\U0001D524', "gg;": '\U0000226B', "ggg;": '\U000022D9', "gimel;": '\U00002137', "gjcy;": '\U00000453', "gl;": '\U00002277', "glE;": '\U00002A92', "gla;": '\U00002AA5', "glj;": '\U00002AA4', "gnE;": '\U00002269', "gnap;": '\U00002A8A', "gnapprox;": '\U00002A8A', "gne;": '\U00002A88', "gneq;": '\U00002A88', "gneqq;": '\U00002269', "gnsim;": '\U000022E7', "gopf;": '\U0001D558', "grave;": '\U00000060', "gscr;": '\U0000210A', "gsim;": '\U00002273', "gsime;": '\U00002A8E', "gsiml;": '\U00002A90', "gt;": '\U0000003E', "gtcc;": '\U00002AA7', "gtcir;": '\U00002A7A', "gtdot;": '\U000022D7', "gtlPar;": '\U00002995', "gtquest;": '\U00002A7C', "gtrapprox;": '\U00002A86', "gtrarr;": '\U00002978', "gtrdot;": '\U000022D7', "gtreqless;": '\U000022DB', "gtreqqless;": '\U00002A8C', "gtrless;": '\U00002277', "gtrsim;": 
'\U00002273', "hArr;": '\U000021D4', "hairsp;": '\U0000200A', "half;": '\U000000BD', "hamilt;": '\U0000210B', "hardcy;": '\U0000044A', "harr;": '\U00002194', "harrcir;": '\U00002948', "harrw;": '\U000021AD', "hbar;": '\U0000210F', "hcirc;": '\U00000125', "hearts;": '\U00002665', "heartsuit;": '\U00002665', "hellip;": '\U00002026', "hercon;": '\U000022B9', "hfr;": '\U0001D525', "hksearow;": '\U00002925', "hkswarow;": '\U00002926', "hoarr;": '\U000021FF', "homtht;": '\U0000223B', "hookleftarrow;": '\U000021A9', "hookrightarrow;": '\U000021AA', "hopf;": '\U0001D559', "horbar;": '\U00002015', "hscr;": '\U0001D4BD', "hslash;": '\U0000210F', "hstrok;": '\U00000127', "hybull;": '\U00002043', "hyphen;": '\U00002010', "iacute;": '\U000000ED', "ic;": '\U00002063', "icirc;": '\U000000EE', "icy;": '\U00000438', "iecy;": '\U00000435', "iexcl;": '\U000000A1', "iff;": '\U000021D4', "ifr;": '\U0001D526', "igrave;": '\U000000EC', "ii;": '\U00002148', "iiiint;": '\U00002A0C', "iiint;": '\U0000222D', "iinfin;": '\U000029DC', "iiota;": '\U00002129', "ijlig;": '\U00000133', "imacr;": '\U0000012B', "image;": '\U00002111', "imagline;": '\U00002110', "imagpart;": '\U00002111', "imath;": '\U00000131', "imof;": '\U000022B7', "imped;": '\U000001B5', "in;": '\U00002208', "incare;": '\U00002105', "infin;": '\U0000221E', "infintie;": '\U000029DD', "inodot;": '\U00000131', "int;": '\U0000222B', "intcal;": '\U000022BA', "integers;": '\U00002124', "intercal;": '\U000022BA', "intlarhk;": '\U00002A17', "intprod;": '\U00002A3C', "iocy;": '\U00000451', "iogon;": '\U0000012F', "iopf;": '\U0001D55A', "iota;": '\U000003B9', "iprod;": '\U00002A3C', "iquest;": '\U000000BF', "iscr;": '\U0001D4BE', "isin;": '\U00002208', "isinE;": '\U000022F9', "isindot;": '\U000022F5', "isins;": '\U000022F4', "isinsv;": '\U000022F3', "isinv;": '\U00002208', "it;": '\U00002062', "itilde;": '\U00000129', "iukcy;": '\U00000456', "iuml;": '\U000000EF', "jcirc;": '\U00000135', "jcy;": '\U00000439', "jfr;": '\U0001D527', 
"jmath;": '\U00000237', "jopf;": '\U0001D55B', "jscr;": '\U0001D4BF', "jsercy;": '\U00000458', "jukcy;": '\U00000454', "kappa;": '\U000003BA', "kappav;": '\U000003F0', "kcedil;": '\U00000137', "kcy;": '\U0000043A', "kfr;": '\U0001D528', "kgreen;": '\U00000138', "khcy;": '\U00000445', "kjcy;": '\U0000045C', "kopf;": '\U0001D55C', "kscr;": '\U0001D4C0', "lAarr;": '\U000021DA', "lArr;": '\U000021D0', "lAtail;": '\U0000291B', "lBarr;": '\U0000290E', "lE;": '\U00002266', "lEg;": '\U00002A8B', "lHar;": '\U00002962', "lacute;": '\U0000013A', "laemptyv;": '\U000029B4', "lagran;": '\U00002112', "lambda;": '\U000003BB', "lang;": '\U000027E8', "langd;": '\U00002991', "langle;": '\U000027E8', "lap;": '\U00002A85', "laquo;": '\U000000AB', "larr;": '\U00002190', "larrb;": '\U000021E4', "larrbfs;": '\U0000291F', "larrfs;": '\U0000291D', "larrhk;": '\U000021A9', "larrlp;": '\U000021AB', "larrpl;": '\U00002939', "larrsim;": '\U00002973', "larrtl;": '\U000021A2', "lat;": '\U00002AAB', "latail;": '\U00002919', "late;": '\U00002AAD', "lbarr;": '\U0000290C', "lbbrk;": '\U00002772', "lbrace;": '\U0000007B', "lbrack;": '\U0000005B', "lbrke;": '\U0000298B', "lbrksld;": '\U0000298F', "lbrkslu;": '\U0000298D', "lcaron;": '\U0000013E', "lcedil;": '\U0000013C', "lceil;": '\U00002308', "lcub;": '\U0000007B', "lcy;": '\U0000043B', "ldca;": '\U00002936', "ldquo;": '\U0000201C', "ldquor;": '\U0000201E', "ldrdhar;": '\U00002967', "ldrushar;": '\U0000294B', "ldsh;": '\U000021B2', "le;": '\U00002264', "leftarrow;": '\U00002190', "leftarrowtail;": '\U000021A2', "leftharpoondown;": '\U000021BD', "leftharpoonup;": '\U000021BC', "leftleftarrows;": '\U000021C7', "leftrightarrow;": '\U00002194', "leftrightarrows;": '\U000021C6', "leftrightharpoons;": '\U000021CB', "leftrightsquigarrow;": '\U000021AD', "leftthreetimes;": '\U000022CB', "leg;": '\U000022DA', "leq;": '\U00002264', "leqq;": '\U00002266', "leqslant;": '\U00002A7D', "les;": '\U00002A7D', "lescc;": '\U00002AA8', "lesdot;": '\U00002A7F', 
"lesdoto;": '\U00002A81', "lesdotor;": '\U00002A83', "lesges;": '\U00002A93', "lessapprox;": '\U00002A85', "lessdot;": '\U000022D6', "lesseqgtr;": '\U000022DA', "lesseqqgtr;": '\U00002A8B', "lessgtr;": '\U00002276', "lesssim;": '\U00002272', "lfisht;": '\U0000297C', "lfloor;": '\U0000230A', "lfr;": '\U0001D529', "lg;": '\U00002276', "lgE;": '\U00002A91', "lhard;": '\U000021BD', "lharu;": '\U000021BC', "lharul;": '\U0000296A', "lhblk;": '\U00002584', "ljcy;": '\U00000459', "ll;": '\U0000226A', "llarr;": '\U000021C7', "llcorner;": '\U0000231E', "llhard;": '\U0000296B', "lltri;": '\U000025FA', "lmidot;": '\U00000140', "lmoust;": '\U000023B0', "lmoustache;": '\U000023B0', "lnE;": '\U00002268', "lnap;": '\U00002A89', "lnapprox;": '\U00002A89', "lne;": '\U00002A87', "lneq;": '\U00002A87', "lneqq;": '\U00002268', "lnsim;": '\U000022E6', "loang;": '\U000027EC', "loarr;": '\U000021FD', "lobrk;": '\U000027E6', "longleftarrow;": '\U000027F5', "longleftrightarrow;": '\U000027F7', "longmapsto;": '\U000027FC', "longrightarrow;": '\U000027F6', "looparrowleft;": '\U000021AB', "looparrowright;": '\U000021AC', "lopar;": '\U00002985', "lopf;": '\U0001D55D', "loplus;": '\U00002A2D', "lotimes;": '\U00002A34', "lowast;": '\U00002217', "lowbar;": '\U0000005F', "loz;": '\U000025CA', "lozenge;": '\U000025CA', "lozf;": '\U000029EB', "lpar;": '\U00000028', "lparlt;": '\U00002993', "lrarr;": '\U000021C6', "lrcorner;": '\U0000231F', "lrhar;": '\U000021CB', "lrhard;": '\U0000296D', "lrm;": '\U0000200E', "lrtri;": '\U000022BF', "lsaquo;": '\U00002039', "lscr;": '\U0001D4C1', "lsh;": '\U000021B0', "lsim;": '\U00002272', "lsime;": '\U00002A8D', "lsimg;": '\U00002A8F', "lsqb;": '\U0000005B', "lsquo;": '\U00002018', "lsquor;": '\U0000201A', "lstrok;": '\U00000142', "lt;": '\U0000003C', "ltcc;": '\U00002AA6', "ltcir;": '\U00002A79', "ltdot;": '\U000022D6', "lthree;": '\U000022CB', "ltimes;": '\U000022C9', "ltlarr;": '\U00002976', "ltquest;": '\U00002A7B', "ltrPar;": '\U00002996', "ltri;": 
'\U000025C3', "ltrie;": '\U000022B4', "ltrif;": '\U000025C2', "lurdshar;": '\U0000294A', "luruhar;": '\U00002966', "mDDot;": '\U0000223A', "macr;": '\U000000AF', "male;": '\U00002642', "malt;": '\U00002720', "maltese;": '\U00002720', "map;": '\U000021A6', "mapsto;": '\U000021A6', "mapstodown;": '\U000021A7', "mapstoleft;": '\U000021A4', "mapstoup;": '\U000021A5', "marker;": '\U000025AE', "mcomma;": '\U00002A29', "mcy;": '\U0000043C', "mdash;": '\U00002014', "measuredangle;": '\U00002221', "mfr;": '\U0001D52A', "mho;": '\U00002127', "micro;": '\U000000B5', "mid;": '\U00002223', "midast;": '\U0000002A', "midcir;": '\U00002AF0', "middot;": '\U000000B7', "minus;": '\U00002212', "minusb;": '\U0000229F', "minusd;": '\U00002238', "minusdu;": '\U00002A2A', "mlcp;": '\U00002ADB', "mldr;": '\U00002026', "mnplus;": '\U00002213', "models;": '\U000022A7', "mopf;": '\U0001D55E', "mp;": '\U00002213', "mscr;": '\U0001D4C2', "mstpos;": '\U0000223E', "mu;": '\U000003BC', "multimap;": '\U000022B8', "mumap;": '\U000022B8', "nLeftarrow;": '\U000021CD', "nLeftrightarrow;": '\U000021CE', "nRightarrow;": '\U000021CF', "nVDash;": '\U000022AF', "nVdash;": '\U000022AE', "nabla;": '\U00002207', "nacute;": '\U00000144', "nap;": '\U00002249', "napos;": '\U00000149', "napprox;": '\U00002249', "natur;": '\U0000266E', "natural;": '\U0000266E', "naturals;": '\U00002115', "nbsp;": '\U000000A0', "ncap;": '\U00002A43', "ncaron;": '\U00000148', "ncedil;": '\U00000146', "ncong;": '\U00002247', "ncup;": '\U00002A42', "ncy;": '\U0000043D', "ndash;": '\U00002013', "ne;": '\U00002260', "neArr;": '\U000021D7', "nearhk;": '\U00002924', "nearr;": '\U00002197', "nearrow;": '\U00002197', "nequiv;": '\U00002262', "nesear;": '\U00002928', "nexist;": '\U00002204', "nexists;": '\U00002204', "nfr;": '\U0001D52B', "nge;": '\U00002271', "ngeq;": '\U00002271', "ngsim;": '\U00002275', "ngt;": '\U0000226F', "ngtr;": '\U0000226F', "nhArr;": '\U000021CE', "nharr;": '\U000021AE', "nhpar;": '\U00002AF2', "ni;": '\U0000220B', 
"nis;": '\U000022FC', "nisd;": '\U000022FA', "niv;": '\U0000220B', "njcy;": '\U0000045A', "nlArr;": '\U000021CD', "nlarr;": '\U0000219A', "nldr;": '\U00002025', "nle;": '\U00002270', "nleftarrow;": '\U0000219A', "nleftrightarrow;": '\U000021AE', "nleq;": '\U00002270', "nless;": '\U0000226E', "nlsim;": '\U00002274', "nlt;": '\U0000226E', "nltri;": '\U000022EA', "nltrie;": '\U000022EC', "nmid;": '\U00002224', "nopf;": '\U0001D55F', "not;": '\U000000AC', "notin;": '\U00002209', "notinva;": '\U00002209', "notinvb;": '\U000022F7', "notinvc;": '\U000022F6', "notni;": '\U0000220C', "notniva;": '\U0000220C', "notnivb;": '\U000022FE', "notnivc;": '\U000022FD', "npar;": '\U00002226', "nparallel;": '\U00002226', "npolint;": '\U00002A14', "npr;": '\U00002280', "nprcue;": '\U000022E0', "nprec;": '\U00002280', "nrArr;": '\U000021CF', "nrarr;": '\U0000219B', "nrightarrow;": '\U0000219B', "nrtri;": '\U000022EB', "nrtrie;": '\U000022ED', "nsc;": '\U00002281', "nsccue;": '\U000022E1', "nscr;": '\U0001D4C3', "nshortmid;": '\U00002224', "nshortparallel;": '\U00002226', "nsim;": '\U00002241', "nsime;": '\U00002244', "nsimeq;": '\U00002244', "nsmid;": '\U00002224', "nspar;": '\U00002226', "nsqsube;": '\U000022E2', "nsqsupe;": '\U000022E3', "nsub;": '\U00002284', "nsube;": '\U00002288', "nsubseteq;": '\U00002288', "nsucc;": '\U00002281', "nsup;": '\U00002285', "nsupe;": '\U00002289', "nsupseteq;": '\U00002289', "ntgl;": '\U00002279', "ntilde;": '\U000000F1', "ntlg;": '\U00002278', "ntriangleleft;": '\U000022EA', "ntrianglelefteq;": '\U000022EC', "ntriangleright;": '\U000022EB', "ntrianglerighteq;": '\U000022ED', "nu;": '\U000003BD', "num;": '\U00000023', "numero;": '\U00002116', "numsp;": '\U00002007', "nvDash;": '\U000022AD', "nvHarr;": '\U00002904', "nvdash;": '\U000022AC', "nvinfin;": '\U000029DE', "nvlArr;": '\U00002902', "nvrArr;": '\U00002903', "nwArr;": '\U000021D6', "nwarhk;": '\U00002923', "nwarr;": '\U00002196', "nwarrow;": '\U00002196', "nwnear;": '\U00002927', "oS;": 
'\U000024C8', "oacute;": '\U000000F3', "oast;": '\U0000229B', "ocir;": '\U0000229A', "ocirc;": '\U000000F4', "ocy;": '\U0000043E', "odash;": '\U0000229D', "odblac;": '\U00000151', "odiv;": '\U00002A38', "odot;": '\U00002299', "odsold;": '\U000029BC', "oelig;": '\U00000153', "ofcir;": '\U000029BF', "ofr;": '\U0001D52C', "ogon;": '\U000002DB', "ograve;": '\U000000F2', "ogt;": '\U000029C1', "ohbar;": '\U000029B5', "ohm;": '\U000003A9', "oint;": '\U0000222E', "olarr;": '\U000021BA', "olcir;": '\U000029BE', "olcross;": '\U000029BB', "oline;": '\U0000203E', "olt;": '\U000029C0', "omacr;": '\U0000014D', "omega;": '\U000003C9', "omicron;": '\U000003BF', "omid;": '\U000029B6', "ominus;": '\U00002296', "oopf;": '\U0001D560', "opar;": '\U000029B7', "operp;": '\U000029B9', "oplus;": '\U00002295', "or;": '\U00002228', "orarr;": '\U000021BB', "ord;": '\U00002A5D', "order;": '\U00002134', "orderof;": '\U00002134', "ordf;": '\U000000AA', "ordm;": '\U000000BA', "origof;": '\U000022B6', "oror;": '\U00002A56', "orslope;": '\U00002A57', "orv;": '\U00002A5B', "oscr;": '\U00002134', "oslash;": '\U000000F8', "osol;": '\U00002298', "otilde;": '\U000000F5', "otimes;": '\U00002297', "otimesas;": '\U00002A36', "ouml;": '\U000000F6', "ovbar;": '\U0000233D', "par;": '\U00002225', "para;": '\U000000B6', "parallel;": '\U00002225', "parsim;": '\U00002AF3', "parsl;": '\U00002AFD', "part;": '\U00002202', "pcy;": '\U0000043F', "percnt;": '\U00000025', "period;": '\U0000002E', "permil;": '\U00002030', "perp;": '\U000022A5', "pertenk;": '\U00002031', "pfr;": '\U0001D52D', "phi;": '\U000003C6', "phiv;": '\U000003D5', "phmmat;": '\U00002133', "phone;": '\U0000260E', "pi;": '\U000003C0', "pitchfork;": '\U000022D4', "piv;": '\U000003D6', "planck;": '\U0000210F', "planckh;": '\U0000210E', "plankv;": '\U0000210F', "plus;": '\U0000002B', "plusacir;": '\U00002A23', "plusb;": '\U0000229E', "pluscir;": '\U00002A22', "plusdo;": '\U00002214', "plusdu;": '\U00002A25', "pluse;": '\U00002A72', "plusmn;": 
'\U000000B1', "plussim;": '\U00002A26', "plustwo;": '\U00002A27', "pm;": '\U000000B1', "pointint;": '\U00002A15', "popf;": '\U0001D561', "pound;": '\U000000A3', "pr;": '\U0000227A', "prE;": '\U00002AB3', "prap;": '\U00002AB7', "prcue;": '\U0000227C', "pre;": '\U00002AAF', "prec;": '\U0000227A', "precapprox;": '\U00002AB7', "preccurlyeq;": '\U0000227C', "preceq;": '\U00002AAF', "precnapprox;": '\U00002AB9', "precneqq;": '\U00002AB5', "precnsim;": '\U000022E8', "precsim;": '\U0000227E', "prime;": '\U00002032', "primes;": '\U00002119', "prnE;": '\U00002AB5', "prnap;": '\U00002AB9', "prnsim;": '\U000022E8', "prod;": '\U0000220F', "profalar;": '\U0000232E', "profline;": '\U00002312', "profsurf;": '\U00002313', "prop;": '\U0000221D', "propto;": '\U0000221D', "prsim;": '\U0000227E', "prurel;": '\U000022B0', "pscr;": '\U0001D4C5', "psi;": '\U000003C8', "puncsp;": '\U00002008', "qfr;": '\U0001D52E', "qint;": '\U00002A0C', "qopf;": '\U0001D562', "qprime;": '\U00002057', "qscr;": '\U0001D4C6', "quaternions;": '\U0000210D', "quatint;": '\U00002A16', "quest;": '\U0000003F', "questeq;": '\U0000225F', "quot;": '\U00000022', "rAarr;": '\U000021DB', "rArr;": '\U000021D2', "rAtail;": '\U0000291C', "rBarr;": '\U0000290F', "rHar;": '\U00002964', "racute;": '\U00000155', "radic;": '\U0000221A', "raemptyv;": '\U000029B3', "rang;": '\U000027E9', "rangd;": '\U00002992', "range;": '\U000029A5', "rangle;": '\U000027E9', "raquo;": '\U000000BB', "rarr;": '\U00002192', "rarrap;": '\U00002975', "rarrb;": '\U000021E5', "rarrbfs;": '\U00002920', "rarrc;": '\U00002933', "rarrfs;": '\U0000291E', "rarrhk;": '\U000021AA', "rarrlp;": '\U000021AC', "rarrpl;": '\U00002945', "rarrsim;": '\U00002974', "rarrtl;": '\U000021A3', "rarrw;": '\U0000219D', "ratail;": '\U0000291A', "ratio;": '\U00002236', "rationals;": '\U0000211A', "rbarr;": '\U0000290D', "rbbrk;": '\U00002773', "rbrace;": '\U0000007D', "rbrack;": '\U0000005D', "rbrke;": '\U0000298C', "rbrksld;": '\U0000298E', "rbrkslu;": '\U00002990', 
"rcaron;": '\U00000159', "rcedil;": '\U00000157', "rceil;": '\U00002309', "rcub;": '\U0000007D', "rcy;": '\U00000440', "rdca;": '\U00002937', "rdldhar;": '\U00002969', "rdquo;": '\U0000201D', "rdquor;": '\U0000201D', "rdsh;": '\U000021B3', "real;": '\U0000211C', "realine;": '\U0000211B', "realpart;": '\U0000211C', "reals;": '\U0000211D', "rect;": '\U000025AD', "reg;": '\U000000AE', "rfisht;": '\U0000297D', "rfloor;": '\U0000230B', "rfr;": '\U0001D52F', "rhard;": '\U000021C1', "rharu;": '\U000021C0', "rharul;": '\U0000296C', "rho;": '\U000003C1', "rhov;": '\U000003F1', "rightarrow;": '\U00002192', "rightarrowtail;": '\U000021A3', "rightharpoondown;": '\U000021C1', "rightharpoonup;": '\U000021C0', "rightleftarrows;": '\U000021C4', "rightleftharpoons;": '\U000021CC', "rightrightarrows;": '\U000021C9', "rightsquigarrow;": '\U0000219D', "rightthreetimes;": '\U000022CC', "ring;": '\U000002DA', "risingdotseq;": '\U00002253', "rlarr;": '\U000021C4', "rlhar;": '\U000021CC', "rlm;": '\U0000200F', "rmoust;": '\U000023B1', "rmoustache;": '\U000023B1', "rnmid;": '\U00002AEE', "roang;": '\U000027ED', "roarr;": '\U000021FE', "robrk;": '\U000027E7', "ropar;": '\U00002986', "ropf;": '\U0001D563', "roplus;": '\U00002A2E', "rotimes;": '\U00002A35', "rpar;": '\U00000029', "rpargt;": '\U00002994', "rppolint;": '\U00002A12', "rrarr;": '\U000021C9', "rsaquo;": '\U0000203A', "rscr;": '\U0001D4C7', "rsh;": '\U000021B1', "rsqb;": '\U0000005D', "rsquo;": '\U00002019', "rsquor;": '\U00002019', "rthree;": '\U000022CC', "rtimes;": '\U000022CA', "rtri;": '\U000025B9', "rtrie;": '\U000022B5', "rtrif;": '\U000025B8', "rtriltri;": '\U000029CE', "ruluhar;": '\U00002968', "rx;": '\U0000211E', "sacute;": '\U0000015B', "sbquo;": '\U0000201A', "sc;": '\U0000227B', "scE;": '\U00002AB4', "scap;": '\U00002AB8', "scaron;": '\U00000161', "sccue;": '\U0000227D', "sce;": '\U00002AB0', "scedil;": '\U0000015F', "scirc;": '\U0000015D', "scnE;": '\U00002AB6', "scnap;": '\U00002ABA', "scnsim;": '\U000022E9', 
"scpolint;": '\U00002A13', "scsim;": '\U0000227F', "scy;": '\U00000441', "sdot;": '\U000022C5', "sdotb;": '\U000022A1', "sdote;": '\U00002A66', "seArr;": '\U000021D8', "searhk;": '\U00002925', "searr;": '\U00002198', "searrow;": '\U00002198', "sect;": '\U000000A7', "semi;": '\U0000003B', "seswar;": '\U00002929', "setminus;": '\U00002216', "setmn;": '\U00002216', "sext;": '\U00002736', "sfr;": '\U0001D530', "sfrown;": '\U00002322', "sharp;": '\U0000266F', "shchcy;": '\U00000449', "shcy;": '\U00000448', "shortmid;": '\U00002223', "shortparallel;": '\U00002225', "shy;": '\U000000AD', "sigma;": '\U000003C3', "sigmaf;": '\U000003C2', "sigmav;": '\U000003C2', "sim;": '\U0000223C', "simdot;": '\U00002A6A', "sime;": '\U00002243', "simeq;": '\U00002243', "simg;": '\U00002A9E', "simgE;": '\U00002AA0', "siml;": '\U00002A9D', "simlE;": '\U00002A9F', "simne;": '\U00002246', "simplus;": '\U00002A24', "simrarr;": '\U00002972', "slarr;": '\U00002190', "smallsetminus;": '\U00002216', "smashp;": '\U00002A33', "smeparsl;": '\U000029E4', "smid;": '\U00002223', "smile;": '\U00002323', "smt;": '\U00002AAA', "smte;": '\U00002AAC', "softcy;": '\U0000044C', "sol;": '\U0000002F', "solb;": '\U000029C4', "solbar;": '\U0000233F', "sopf;": '\U0001D564', "spades;": '\U00002660', "spadesuit;": '\U00002660', "spar;": '\U00002225', "sqcap;": '\U00002293', "sqcup;": '\U00002294', "sqsub;": '\U0000228F', "sqsube;": '\U00002291', "sqsubset;": '\U0000228F', "sqsubseteq;": '\U00002291', "sqsup;": '\U00002290', "sqsupe;": '\U00002292', "sqsupset;": '\U00002290', "sqsupseteq;": '\U00002292', "squ;": '\U000025A1', "square;": '\U000025A1', "squarf;": '\U000025AA', "squf;": '\U000025AA', "srarr;": '\U00002192', "sscr;": '\U0001D4C8', "ssetmn;": '\U00002216', "ssmile;": '\U00002323', "sstarf;": '\U000022C6', "star;": '\U00002606', "starf;": '\U00002605', "straightepsilon;": '\U000003F5', "straightphi;": '\U000003D5', "strns;": '\U000000AF', "sub;": '\U00002282', "subE;": '\U00002AC5', "subdot;": '\U00002ABD', 
"sube;": '\U00002286', "subedot;": '\U00002AC3', "submult;": '\U00002AC1', "subnE;": '\U00002ACB', "subne;": '\U0000228A', "subplus;": '\U00002ABF', "subrarr;": '\U00002979', "subset;": '\U00002282', "subseteq;": '\U00002286', "subseteqq;": '\U00002AC5', "subsetneq;": '\U0000228A', "subsetneqq;": '\U00002ACB', "subsim;": '\U00002AC7', "subsub;": '\U00002AD5', "subsup;": '\U00002AD3', "succ;": '\U0000227B', "succapprox;": '\U00002AB8', "succcurlyeq;": '\U0000227D', "succeq;": '\U00002AB0', "succnapprox;": '\U00002ABA', "succneqq;": '\U00002AB6', "succnsim;": '\U000022E9', "succsim;": '\U0000227F', "sum;": '\U00002211', "sung;": '\U0000266A', "sup;": '\U00002283', "sup1;": '\U000000B9', "sup2;": '\U000000B2', "sup3;": '\U000000B3', "supE;": '\U00002AC6', "supdot;": '\U00002ABE', "supdsub;": '\U00002AD8', "supe;": '\U00002287', "supedot;": '\U00002AC4', "suphsol;": '\U000027C9', "suphsub;": '\U00002AD7', "suplarr;": '\U0000297B', "supmult;": '\U00002AC2', "supnE;": '\U00002ACC', "supne;": '\U0000228B', "supplus;": '\U00002AC0', "supset;": '\U00002283', "supseteq;": '\U00002287', "supseteqq;": '\U00002AC6', "supsetneq;": '\U0000228B', "supsetneqq;": '\U00002ACC', "supsim;": '\U00002AC8', "supsub;": '\U00002AD4', "supsup;": '\U00002AD6', "swArr;": '\U000021D9', "swarhk;": '\U00002926', "swarr;": '\U00002199', "swarrow;": '\U00002199', "swnwar;": '\U0000292A', "szlig;": '\U000000DF', "target;": '\U00002316', "tau;": '\U000003C4', "tbrk;": '\U000023B4', "tcaron;": '\U00000165', "tcedil;": '\U00000163', "tcy;": '\U00000442', "tdot;": '\U000020DB', "telrec;": '\U00002315', "tfr;": '\U0001D531', "there4;": '\U00002234', "therefore;": '\U00002234', "theta;": '\U000003B8', "thetasym;": '\U000003D1', "thetav;": '\U000003D1', "thickapprox;": '\U00002248', "thicksim;": '\U0000223C', "thinsp;": '\U00002009', "thkap;": '\U00002248', "thksim;": '\U0000223C', "thorn;": '\U000000FE', "tilde;": '\U000002DC', "times;": '\U000000D7', "timesb;": '\U000022A0', "timesbar;": '\U00002A31', 
"timesd;": '\U00002A30', "tint;": '\U0000222D', "toea;": '\U00002928', "top;": '\U000022A4', "topbot;": '\U00002336', "topcir;": '\U00002AF1', "topf;": '\U0001D565', "topfork;": '\U00002ADA', "tosa;": '\U00002929', "tprime;": '\U00002034', "trade;": '\U00002122', "triangle;": '\U000025B5', "triangledown;": '\U000025BF', "triangleleft;": '\U000025C3', "trianglelefteq;": '\U000022B4', "triangleq;": '\U0000225C', "triangleright;": '\U000025B9', "trianglerighteq;": '\U000022B5', "tridot;": '\U000025EC', "trie;": '\U0000225C', "triminus;": '\U00002A3A', "triplus;": '\U00002A39', "trisb;": '\U000029CD', "tritime;": '\U00002A3B', "trpezium;": '\U000023E2', "tscr;": '\U0001D4C9', "tscy;": '\U00000446', "tshcy;": '\U0000045B', "tstrok;": '\U00000167', "twixt;": '\U0000226C', "twoheadleftarrow;": '\U0000219E', "twoheadrightarrow;": '\U000021A0', "uArr;": '\U000021D1', "uHar;": '\U00002963', "uacute;": '\U000000FA', "uarr;": '\U00002191', "ubrcy;": '\U0000045E', "ubreve;": '\U0000016D', "ucirc;": '\U000000FB', "ucy;": '\U00000443', "udarr;": '\U000021C5', "udblac;": '\U00000171', "udhar;": '\U0000296E', "ufisht;": '\U0000297E', "ufr;": '\U0001D532', "ugrave;": '\U000000F9', "uharl;": '\U000021BF', "uharr;": '\U000021BE', "uhblk;": '\U00002580', "ulcorn;": '\U0000231C', "ulcorner;": '\U0000231C', "ulcrop;": '\U0000230F', "ultri;": '\U000025F8', "umacr;": '\U0000016B', "uml;": '\U000000A8', "uogon;": '\U00000173', "uopf;": '\U0001D566', "uparrow;": '\U00002191', "updownarrow;": '\U00002195', "upharpoonleft;": '\U000021BF', "upharpoonright;": '\U000021BE', "uplus;": '\U0000228E', "upsi;": '\U000003C5', "upsih;": '\U000003D2', "upsilon;": '\U000003C5', "upuparrows;": '\U000021C8', "urcorn;": '\U0000231D', "urcorner;": '\U0000231D', "urcrop;": '\U0000230E', "uring;": '\U0000016F', "urtri;": '\U000025F9', "uscr;": '\U0001D4CA', "utdot;": '\U000022F0', "utilde;": '\U00000169', "utri;": '\U000025B5', "utrif;": '\U000025B4', "uuarr;": '\U000021C8', "uuml;": '\U000000FC', "uwangle;": 
'\U000029A7', "vArr;": '\U000021D5', "vBar;": '\U00002AE8', "vBarv;": '\U00002AE9', "vDash;": '\U000022A8', "vangrt;": '\U0000299C', "varepsilon;": '\U000003F5', "varkappa;": '\U000003F0', "varnothing;": '\U00002205', "varphi;": '\U000003D5', "varpi;": '\U000003D6', "varpropto;": '\U0000221D', "varr;": '\U00002195', "varrho;": '\U000003F1', "varsigma;": '\U000003C2', "vartheta;": '\U000003D1', "vartriangleleft;": '\U000022B2', "vartriangleright;": '\U000022B3', "vcy;": '\U00000432', "vdash;": '\U000022A2', "vee;": '\U00002228', "veebar;": '\U000022BB', "veeeq;": '\U0000225A', "vellip;": '\U000022EE', "verbar;": '\U0000007C', "vert;": '\U0000007C', "vfr;": '\U0001D533', "vltri;": '\U000022B2', "vopf;": '\U0001D567', "vprop;": '\U0000221D', "vrtri;": '\U000022B3', "vscr;": '\U0001D4CB', "vzigzag;": '\U0000299A', "wcirc;": '\U00000175', "wedbar;": '\U00002A5F', "wedge;": '\U00002227', "wedgeq;": '\U00002259', "weierp;": '\U00002118', "wfr;": '\U0001D534', "wopf;": '\U0001D568', "wp;": '\U00002118', "wr;": '\U00002240', "wreath;": '\U00002240', "wscr;": '\U0001D4CC', "xcap;": '\U000022C2', "xcirc;": '\U000025EF', "xcup;": '\U000022C3', "xdtri;": '\U000025BD', "xfr;": '\U0001D535', "xhArr;": '\U000027FA', "xharr;": '\U000027F7', "xi;": '\U000003BE', "xlArr;": '\U000027F8', "xlarr;": '\U000027F5', "xmap;": '\U000027FC', "xnis;": '\U000022FB', "xodot;": '\U00002A00', "xopf;": '\U0001D569', "xoplus;": '\U00002A01', "xotime;": '\U00002A02', "xrArr;": '\U000027F9', "xrarr;": '\U000027F6', "xscr;": '\U0001D4CD', "xsqcup;": '\U00002A06', "xuplus;": '\U00002A04', "xutri;": '\U000025B3', "xvee;": '\U000022C1', "xwedge;": '\U000022C0', "yacute;": '\U000000FD', "yacy;": '\U0000044F', "ycirc;": '\U00000177', "ycy;": '\U0000044B', "yen;": '\U000000A5', "yfr;": '\U0001D536', "yicy;": '\U00000457', "yopf;": '\U0001D56A', "yscr;": '\U0001D4CE', "yucy;": '\U0000044E', "yuml;": '\U000000FF', "zacute;": '\U0000017A', "zcaron;": '\U0000017E', "zcy;": '\U00000437', "zdot;": '\U0000017C', 
"zeetrf;": '\U00002128', "zeta;": '\U000003B6', "zfr;": '\U0001D537', "zhcy;": '\U00000436', "zigrarr;": '\U000021DD', "zopf;": '\U0001D56B', "zscr;": '\U0001D4CF', "zwj;": '\U0000200D', "zwnj;": '\U0000200C', "AElig": '\U000000C6', "AMP": '\U00000026', "Aacute": '\U000000C1', "Acirc": '\U000000C2', "Agrave": '\U000000C0', "Aring": '\U000000C5', "Atilde": '\U000000C3', "Auml": '\U000000C4', "COPY": '\U000000A9', "Ccedil": '\U000000C7', "ETH": '\U000000D0', "Eacute": '\U000000C9', "Ecirc": '\U000000CA', "Egrave": '\U000000C8', "Euml": '\U000000CB', "GT": '\U0000003E', "Iacute": '\U000000CD', "Icirc": '\U000000CE', "Igrave": '\U000000CC', "Iuml": '\U000000CF', "LT": '\U0000003C', "Ntilde": '\U000000D1', "Oacute": '\U000000D3', "Ocirc": '\U000000D4', "Ograve": '\U000000D2', "Oslash": '\U000000D8', "Otilde": '\U000000D5', "Ouml": '\U000000D6', "QUOT": '\U00000022', "REG": '\U000000AE', "THORN": '\U000000DE', "Uacute": '\U000000DA', "Ucirc": '\U000000DB', "Ugrave": '\U000000D9', "Uuml": '\U000000DC', "Yacute": '\U000000DD', "aacute": '\U000000E1', "acirc": '\U000000E2', "acute": '\U000000B4', "aelig": '\U000000E6', "agrave": '\U000000E0', "amp": '\U00000026', "aring": '\U000000E5', "atilde": '\U000000E3', "auml": '\U000000E4', "brvbar": '\U000000A6', "ccedil": '\U000000E7', "cedil": '\U000000B8', "cent": '\U000000A2', "copy": '\U000000A9', "curren": '\U000000A4', "deg": '\U000000B0', "divide": '\U000000F7', "eacute": '\U000000E9', "ecirc": '\U000000EA', "egrave": '\U000000E8', "eth": '\U000000F0', "euml": '\U000000EB', "frac12": '\U000000BD', "frac14": '\U000000BC', "frac34": '\U000000BE', "gt": '\U0000003E', "iacute": '\U000000ED', "icirc": '\U000000EE', "iexcl": '\U000000A1', "igrave": '\U000000EC', "iquest": '\U000000BF', "iuml": '\U000000EF', "laquo": '\U000000AB', "lt": '\U0000003C', "macr": '\U000000AF', "micro": '\U000000B5', "middot": '\U000000B7', "nbsp": '\U000000A0', "not": '\U000000AC', "ntilde": '\U000000F1', "oacute": '\U000000F3', "ocirc": '\U000000F4', 
"ograve": '\U000000F2', "ordf": '\U000000AA', "ordm": '\U000000BA', "oslash": '\U000000F8', "otilde": '\U000000F5', "ouml": '\U000000F6', "para": '\U000000B6', "plusmn": '\U000000B1', "pound": '\U000000A3', "quot": '\U00000022', "raquo": '\U000000BB', "reg": '\U000000AE', "sect": '\U000000A7', "shy": '\U000000AD', "sup1": '\U000000B9', "sup2": '\U000000B2', "sup3": '\U000000B3', "szlig": '\U000000DF', "thorn": '\U000000FE', "times": '\U000000D7', "uacute": '\U000000FA', "ucirc": '\U000000FB', "ugrave": '\U000000F9', "uml": '\U000000A8', "uuml": '\U000000FC', "yacute": '\U000000FD', "yen": '\U000000A5', "yuml": '\U000000FF', } // HTML entities that are two unicode codepoints. var entity2 = map[string][2]rune{ // TODO(nigeltao): Handle replacements that are wider than their names. // "nLt;": {'\u226A', '\u20D2'}, // "nGt;": {'\u226B', '\u20D2'}, "NotEqualTilde;": {'\u2242', '\u0338'}, "NotGreaterFullEqual;": {'\u2267', '\u0338'}, "NotGreaterGreater;": {'\u226B', '\u0338'}, "NotGreaterSlantEqual;": {'\u2A7E', '\u0338'}, "NotHumpDownHump;": {'\u224E', '\u0338'}, "NotHumpEqual;": {'\u224F', '\u0338'}, "NotLeftTriangleBar;": {'\u29CF', '\u0338'}, "NotLessLess;": {'\u226A', '\u0338'}, "NotLessSlantEqual;": {'\u2A7D', '\u0338'}, "NotNestedGreaterGreater;": {'\u2AA2', '\u0338'}, "NotNestedLessLess;": {'\u2AA1', '\u0338'}, "NotPrecedesEqual;": {'\u2AAF', '\u0338'}, "NotRightTriangleBar;": {'\u29D0', '\u0338'}, "NotSquareSubset;": {'\u228F', '\u0338'}, "NotSquareSuperset;": {'\u2290', '\u0338'}, "NotSubset;": {'\u2282', '\u20D2'}, "NotSucceedsEqual;": {'\u2AB0', '\u0338'}, "NotSucceedsTilde;": {'\u227F', '\u0338'}, "NotSuperset;": {'\u2283', '\u20D2'}, "ThickSpace;": {'\u205F', '\u200A'}, "acE;": {'\u223E', '\u0333'}, "bne;": {'\u003D', '\u20E5'}, "bnequiv;": {'\u2261', '\u20E5'}, "caps;": {'\u2229', '\uFE00'}, "cups;": {'\u222A', '\uFE00'}, "fjlig;": {'\u0066', '\u006A'}, "gesl;": {'\u22DB', '\uFE00'}, "gvertneqq;": {'\u2269', '\uFE00'}, "gvnE;": {'\u2269', '\uFE00'}, 
"lates;": {'\u2AAD', '\uFE00'}, "lesg;": {'\u22DA', '\uFE00'}, "lvertneqq;": {'\u2268', '\uFE00'}, "lvnE;": {'\u2268', '\uFE00'}, "nGg;": {'\u22D9', '\u0338'}, "nGtv;": {'\u226B', '\u0338'}, "nLl;": {'\u22D8', '\u0338'}, "nLtv;": {'\u226A', '\u0338'}, "nang;": {'\u2220', '\u20D2'}, "napE;": {'\u2A70', '\u0338'}, "napid;": {'\u224B', '\u0338'}, "nbump;": {'\u224E', '\u0338'}, "nbumpe;": {'\u224F', '\u0338'}, "ncongdot;": {'\u2A6D', '\u0338'}, "nedot;": {'\u2250', '\u0338'}, "nesim;": {'\u2242', '\u0338'}, "ngE;": {'\u2267', '\u0338'}, "ngeqq;": {'\u2267', '\u0338'}, "ngeqslant;": {'\u2A7E', '\u0338'}, "nges;": {'\u2A7E', '\u0338'}, "nlE;": {'\u2266', '\u0338'}, "nleqq;": {'\u2266', '\u0338'}, "nleqslant;": {'\u2A7D', '\u0338'}, "nles;": {'\u2A7D', '\u0338'}, "notinE;": {'\u22F9', '\u0338'}, "notindot;": {'\u22F5', '\u0338'}, "nparsl;": {'\u2AFD', '\u20E5'}, "npart;": {'\u2202', '\u0338'}, "npre;": {'\u2AAF', '\u0338'}, "npreceq;": {'\u2AAF', '\u0338'}, "nrarrc;": {'\u2933', '\u0338'}, "nrarrw;": {'\u219D', '\u0338'}, "nsce;": {'\u2AB0', '\u0338'}, "nsubE;": {'\u2AC5', '\u0338'}, "nsubset;": {'\u2282', '\u20D2'}, "nsubseteqq;": {'\u2AC5', '\u0338'}, "nsucceq;": {'\u2AB0', '\u0338'}, "nsupE;": {'\u2AC6', '\u0338'}, "nsupset;": {'\u2283', '\u20D2'}, "nsupseteqq;": {'\u2AC6', '\u0338'}, "nvap;": {'\u224D', '\u20D2'}, "nvge;": {'\u2265', '\u20D2'}, "nvgt;": {'\u003E', '\u20D2'}, "nvle;": {'\u2264', '\u20D2'}, "nvlt;": {'\u003C', '\u20D2'}, "nvltrie;": {'\u22B4', '\u20D2'}, "nvrtrie;": {'\u22B5', '\u20D2'}, "nvsim;": {'\u223C', '\u20D2'}, "race;": {'\u223D', '\u0331'}, "smtes;": {'\u2AAC', '\uFE00'}, "sqcaps;": {'\u2293', '\uFE00'}, "sqcups;": {'\u2294', '\uFE00'}, "varsubsetneq;": {'\u228A', '\uFE00'}, "varsubsetneqq;": {'\u2ACB', '\uFE00'}, "varsupsetneq;": {'\u228B', '\uFE00'}, "varsupsetneqq;": {'\u2ACC', '\uFE00'}, "vnsub;": {'\u2282', '\u20D2'}, "vnsup;": {'\u2283', '\u20D2'}, "vsubnE;": {'\u2ACB', '\uFE00'}, "vsubne;": {'\u228A', '\uFE00'}, "vsupnE;": {'\u2ACC', 
// NOTE(review): collapsed multi-file extraction — this span holds the tail of the entity2 table plus internal/escape.go.
'\uFE00'}, "vsupne;": {'\u228B', '\uFE00'}, } ================================================ FILE: internal/escape.go ================================================ // Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package astro import ( "bytes" "io" "strings" "unicode/utf8" ) type writer interface { io.Writer io.ByteWriter WriteString(string) (int, error) } // These replacements permit compatibility with old numeric entities that // assumed Windows-1252 encoding. // https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference var replacementTable = [...]rune{ '\u20AC', // First entry is what 0x80 should be replaced with. '\u0081', '\u201A', '\u0192', '\u201E', '\u2026', '\u2020', '\u2021', '\u02C6', '\u2030', '\u0160', '\u2039', '\u0152', '\u008D', '\u017D', '\u008F', '\u0090', '\u2018', '\u2019', '\u201C', '\u201D', '\u2022', '\u2013', '\u2014', '\u02DC', '\u2122', '\u0161', '\u203A', '\u0153', '\u009D', '\u017E', '\u0178', // Last entry is 0x9F. // 0x00->'\uFFFD' is handled programmatically. // 0x0D->'\u000D' is a no-op. } // unescapeEntity reads an entity like "<" from b[src:] and writes the // corresponding "<" to b[dst:], returning the incremented dst and src cursors. // Precondition: b[src] == '&' && dst <= src. // attribute should be true if parsing an attribute value. func unescapeEntity(b []byte, dst, src int, attribute bool) (dst1, src1 int) { // https://html.spec.whatwg.org/multipage/syntax.html#consume-a-character-reference // i starts at 1 because we already know that s[0] == '&'. i, s := 1, b[src:] if len(s) <= 1 { b[dst] = b[src] return dst + 1, src + 1 } if s[i] == '#' { if len(s) <= 3 { // We need to have at least "&#.".
// Numeric character-reference path: parses decimal or hex ('x'/'X') digits,
// maps 0x80..0x9F through replacementTable (Windows-1252 compat), and replaces
// NUL, surrogates and out-of-range codepoints with U+FFFD.
b[dst] = b[src] return dst + 1, src + 1 } i++ c := s[i] hex := false if c == 'x' || c == 'X' { hex = true i++ } x := '\x00' for i < len(s) { c = s[i] i++ if hex { if '0' <= c && c <= '9' { x = 16*x + rune(c) - '0' continue } else if 'a' <= c && c <= 'f' { x = 16*x + rune(c) - 'a' + 10 continue } else if 'A' <= c && c <= 'F' { x = 16*x + rune(c) - 'A' + 10 continue } } else if '0' <= c && c <= '9' { x = 10*x + rune(c) - '0' continue } if c != ';' { i-- } break } if i <= 3 { // No characters matched. b[dst] = b[src] return dst + 1, src + 1 } if 0x80 <= x && x <= 0x9F { // Replace characters from Windows-1252 with UTF-8 equivalents. x = replacementTable[x-0x80] } else if x == 0 || (0xD800 <= x && x <= 0xDFFF) || x > 0x10FFFF { // Replace invalid characters with the replacement character. x = '\uFFFD' } return dst + utf8.EncodeRune(b[dst:], x), src + i } // Consume the maximum number of characters possible, with the // consumed characters matching one of the named references. for i < len(s) { c := s[i] i++ // Lower-cased characters are more common in entities, so we check for them first. if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' { continue } if c != ';' { i-- } break } entityName := string(s[1:i]) if entityName == "" { // No-op. } else if attribute && entityName[len(entityName)-1] != ';' && len(s) > i && s[i] == '=' { // No-op.
// Named-reference path: falls through to the entity / entity2 lookup tables;
// per the HTML spec, inside an attribute value a reference without a trailing
// ';' that is followed by '=' is left untouched (the two No-op branches above).
// NOTE(review): the remainder of escape.go below — the in-place `unescape`
// helper and the `escape` switch — has been corrupted by text extraction:
// entity strings like "&lt;" were HTML-decoded and code was elided mid-comment.
// Restore this region from the upstream Go html package before relying on it.
} else if x := entity[entityName]; x != 0 { return dst + utf8.EncodeRune(b[dst:], x), src + i } else if x := entity2[entityName]; x[0] != 0 { dst1 := dst + utf8.EncodeRune(b[dst:], x[0]) return dst1 + utf8.EncodeRune(b[dst1:], x[1]), src + i } else if !attribute { maxLen := len(entityName) - 1 if maxLen > longestEntityWithoutSemicolon { maxLen = longestEntityWithoutSemicolon } for j := maxLen; j > 1; j-- { if x := entity[entityName[:j]]; x != 0 { return dst + utf8.EncodeRune(b[dst:], x), src + j + 1 } } } dst1, src1 = dst+i, src+i copy(b[dst:dst1], b[src:src1]) return dst1, src1 } // unescape unescapes b's entities in-place, so that "a<b" becomes "a': esc = ">" case '"': // """ is shorter than """. esc = """ case '\r': esc = " " default: panic("unrecognized escape character") } s = s[i+1:] if _, err := w.WriteString(esc); err != nil { return err } i = strings.IndexAny(s, escapedChars) } _, err := w.WriteString(s) return err } // EscapeString escapes special characters like "<" to become "<". It // escapes only five such characters: <, >, &, ' and ". // UnescapeString(EscapeString(s)) == s always holds, but the converse isn't // always true. func EscapeString(s string) string { if !strings.ContainsAny(s, escapedChars) { return s } var buf bytes.Buffer escape(&buf, s) return buf.String() } // UnescapeString unescapes entities like "<" to become "<". It unescapes a // larger range of entities than EscapeString escapes. For example, "á" // unescapes to "á", as does "á" and "&xE1;". // UnescapeString(EscapeString(s)) == s always holds, but the converse isn't // always true. func UnescapeString(s string) string { for _, c := range s { if c == '&' { return string(unescape([]byte(s), false)) } } return s } ================================================ FILE: internal/foreign.go ================================================ // Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package astro import ( "strings" ) func adjustAttributeNames(aa []Attribute, nameMap map[string]string) { for i := range aa { if newName, ok := nameMap[aa[i].Key]; ok { aa[i].Key = newName } } } func adjustForeignAttributes(aa []Attribute) { for i, a := range aa { if a.Key == "" || a.Key[0] != 'x' { continue } switch a.Key { case "xlink:actuate", "xlink:arcrole", "xlink:href", "xlink:role", "xlink:show", "xlink:title", "xlink:type", "xml:base", "xml:lang", "xml:space", "xmlns:xlink": j := strings.Index(a.Key, ":") aa[i].Namespace = a.Key[:j] aa[i].Key = a.Key[j+1:] } } } func htmlIntegrationPoint(n *Node) bool { if n.Type != ElementNode { return false } switch n.Namespace { case "math": if n.Data == "annotation-xml" { for _, a := range n.Attr { if a.Key == "encoding" { val := strings.ToLower(a.Val) if val == "text/html" || val == "application/xhtml+xml" { return true } } } } case "svg": switch n.Data { case "desc", "foreignObject", "title": return true } } return false } func mathMLTextIntegrationPoint(n *Node) bool { if n.Namespace != "math" { return false } switch n.Data { case "mi", "mo", "mn", "ms", "mtext": return true } return false } // Section 12.2.6.5. var breakout = map[string]bool{ "b": true, "big": true, "blockquote": true, "body": true, "br": true, "center": true, "code": true, "dd": true, "div": true, "dl": true, "dt": true, "em": true, "embed": true, "h1": true, "h2": true, "h3": true, "h4": true, "h5": true, "h6": true, "head": true, "hr": true, "i": true, "img": true, "li": true, "listing": true, "menu": true, "meta": true, "nobr": true, "ol": true, "p": true, "pre": true, "ruby": true, "s": true, "small": true, "span": true, "strong": true, "strike": true, "sub": true, "sup": true, "table": true, "tt": true, "u": true, "ul": true, "var": true, } // Section 12.2.6.5. 
// (review) svgTagNameAdjustments / svgAttributeAdjustments: lowercased SVG
// names -> canonical camelCase spellings, per the HTML spec's rules for
// parsing tokens in foreign content.
var svgTagNameAdjustments = map[string]string{ "altglyph": "altGlyph", "altglyphdef": "altGlyphDef", "altglyphitem": "altGlyphItem", "animatecolor": "animateColor", "animatemotion": "animateMotion", "animatetransform": "animateTransform", "clippath": "clipPath", "feblend": "feBlend", "fecolormatrix": "feColorMatrix", "fecomponenttransfer": "feComponentTransfer", "fecomposite": "feComposite", "feconvolvematrix": "feConvolveMatrix", "fediffuselighting": "feDiffuseLighting", "fedisplacementmap": "feDisplacementMap", "fedistantlight": "feDistantLight", "feflood": "feFlood", "fefunca": "feFuncA", "fefuncb": "feFuncB", "fefuncg": "feFuncG", "fefuncr": "feFuncR", "fegaussianblur": "feGaussianBlur", "feimage": "feImage", "femerge": "feMerge", "femergenode": "feMergeNode", "femorphology": "feMorphology", "feoffset": "feOffset", "fepointlight": "fePointLight", "fespecularlighting": "feSpecularLighting", "fespotlight": "feSpotLight", "fetile": "feTile", "feturbulence": "feTurbulence", "foreignobject": "foreignObject", "glyphref": "glyphRef", "lineargradient": "linearGradient", "radialgradient": "radialGradient", "textpath": "textPath", } // Section 12.2.6.1 var mathMLAttributeAdjustments = map[string]string{ "definitionurl": "definitionURL", } var svgAttributeAdjustments = map[string]string{ "attributename": "attributeName", "attributetype": "attributeType", "basefrequency": "baseFrequency", "baseprofile": "baseProfile", "calcmode": "calcMode", "clippathunits": "clipPathUnits", "diffuseconstant": "diffuseConstant", "edgemode": "edgeMode", "filterunits": "filterUnits", "glyphref": "glyphRef", "gradienttransform": "gradientTransform", "gradientunits": "gradientUnits", "kernelmatrix": "kernelMatrix", "kernelunitlength": "kernelUnitLength", "keypoints": "keyPoints", "keysplines": "keySplines", "keytimes": "keyTimes", "lengthadjust": "lengthAdjust", "limitingconeangle": "limitingConeAngle", "markerheight": "markerHeight", "markerunits": "markerUnits", "markerwidth": "markerWidth",
"maskcontentunits": "maskContentUnits", "maskunits": "maskUnits", "numoctaves": "numOctaves", "pathlength": "pathLength", "patterncontentunits": "patternContentUnits", "patterntransform": "patternTransform", "patternunits": "patternUnits", "pointsatx": "pointsAtX", "pointsaty": "pointsAtY", "pointsatz": "pointsAtZ", "preservealpha": "preserveAlpha", "preserveaspectratio": "preserveAspectRatio", "primitiveunits": "primitiveUnits", "refx": "refX", "refy": "refY", "repeatcount": "repeatCount", "repeatdur": "repeatDur", "requiredextensions": "requiredExtensions", "requiredfeatures": "requiredFeatures", "specularconstant": "specularConstant", "specularexponent": "specularExponent", "spreadmethod": "spreadMethod", "startoffset": "startOffset", "stddeviation": "stdDeviation", "stitchtiles": "stitchTiles", "surfacescale": "surfaceScale", "systemlanguage": "systemLanguage", "tablevalues": "tableValues", "targetx": "targetX", "targety": "targetY", "textlength": "textLength", "viewbox": "viewBox", "viewtarget": "viewTarget", "xchannelselector": "xChannelSelector", "ychannelselector": "yChannelSelector", "zoomandpan": "zoomAndPan", } ================================================ FILE: internal/handler/handler.go ================================================ package handler import ( "errors" "strings" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/sourcemap" ) type Handler struct { sourcetext string filename string builder sourcemap.ChunkBuilder errors []error warnings []error infos []error hints []error } func NewHandler(sourcetext string, filename string) *Handler { return &Handler{ sourcetext: sourcetext, filename: filename, builder: sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(sourcetext, len(strings.Split(sourcetext, "\n")))), errors: make([]error, 0), warnings: make([]error, 0), infos: make([]error, 0), hints: make([]error, 0), } } func (h *Handler) HasErrors() bool { return len(h.errors) > 0 } func (h
// Handler appenders and accessors below: Errors()/Warnings() skip nil entries,
// and Diagnostics() flattens errors, warnings, infos, then hints — in that
// order — into one slice via ErrorToMessage.
*Handler) AppendError(err error) { h.errors = append(h.errors, err) } func (h *Handler) AppendWarning(err error) { h.warnings = append(h.warnings, err) } func (h *Handler) AppendInfo(err error) { h.infos = append(h.infos, err) } func (h *Handler) AppendHint(err error) { h.hints = append(h.hints, err) } func (h *Handler) Errors() []loc.DiagnosticMessage { msgs := make([]loc.DiagnosticMessage, 0) for _, err := range h.errors { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.ErrorType, err)) } } return msgs } func (h *Handler) Warnings() []loc.DiagnosticMessage { msgs := make([]loc.DiagnosticMessage, 0) for _, err := range h.warnings { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.WarningType, err)) } } return msgs } func (h *Handler) Diagnostics() []loc.DiagnosticMessage { msgs := make([]loc.DiagnosticMessage, 0) for _, err := range h.errors { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.ErrorType, err)) } } for _, err := range h.warnings { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.WarningType, err)) } } for _, err := range h.infos { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.InformationType, err)) } } for _, err := range h.hints { if err != nil { msgs = append(msgs, ErrorToMessage(h, loc.HintType, err)) } } return msgs } func ErrorToMessage(h *Handler, severity loc.DiagnosticSeverity, err error) loc.DiagnosticMessage { var rangedError *loc.ErrorWithRange switch { case errors.As(err, &rangedError): pos := h.builder.GetLineAndColumnForLocation(rangedError.Range.Loc) location := &loc.DiagnosticLocation{ File: h.filename, Line: pos[0], Column: pos[1], Length: rangedError.Range.Len, } message := rangedError.ToMessage(location) message.Severity = int(severity) return message default: return loc.DiagnosticMessage{Text: err.Error()} } } ================================================ FILE: internal/hash.go ================================================ package astro import ( "encoding/base32" "strings"
// HashString: xxhash of str, base32-encoded, lowercased, truncated to 8
// characters. NOTE(review): presumably used for scoped style/class hashes —
// confirm against printer callers before changing the encoding or length.
"github.com/withastro/compiler/internal/xxhash" ) func HashString(str string) string { h := xxhash.New() //nolint h.Write([]byte(str)) hashBytes := h.Sum(nil) return strings.ToLower(base32.StdEncoding.EncodeToString(hashBytes)[:8]) } ================================================ FILE: internal/helpers/joiner.go ================================================ package helpers import ( "bytes" "strings" ) // This provides an efficient way to join lots of big string and byte slices // together. It avoids the cost of repeatedly reallocating as the buffer grows // by measuring exactly how big the buffer should be and then allocating once. // This is a measurable speedup. type Joiner struct { lastByte byte strings []joinerString bytes []joinerBytes length uint32 } type joinerString struct { data string offset uint32 } type joinerBytes struct { data []byte offset uint32 } func (j *Joiner) AddString(data string) { if len(data) > 0 { j.lastByte = data[len(data)-1] } j.strings = append(j.strings, joinerString{data, j.length}) j.length += uint32(len(data)) } func (j *Joiner) AddBytes(data []byte) { if len(data) > 0 { j.lastByte = data[len(data)-1] } j.bytes = append(j.bytes, joinerBytes{data, j.length}) j.length += uint32(len(data)) } func (j *Joiner) LastByte() byte { return j.lastByte } func (j *Joiner) Length() uint32 { return j.length } func (j *Joiner) EnsureNewlineAtEnd() { if j.length > 0 && j.lastByte != '\n' { j.AddString("\n") } } func (j *Joiner) Done() []byte { if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 { // No need to allocate if there was only a single byte array written return j.bytes[0].data } buffer := make([]byte, j.length) for _, item := range j.strings { copy(buffer[item.offset:], item.data) } for _, item := range j.bytes { copy(buffer[item.offset:], item.data) } return buffer } func (j *Joiner) Contains(s string, b []byte) bool { for _, item
// NOTE(review): Joiner.Contains probes each added chunk independently, so a
// needle that spans two chunks is NOT found — confirm callers only search for
// short tokens fully contained in one AddString/AddBytes call.
// NOTE(review): RemoveComments below is not string-literal aware (a '/'
// starting "//" or "/*" inside a string is treated as a comment), it drops the
// newline terminating a "//" comment (the skip loop stops AT '\n' and the
// post-statement cur++ then steps over it), and an unterminated block comment
// makes the whole function return "". Verify callers tolerate all three.
:= range j.bytes { if bytes.Contains(item.data, b) { return true } } return false } ================================================ FILE: internal/helpers/js_comment_utils.go ================================================ package helpers import ( "strings" ) func peekIs(input string, cur int, assert byte) bool { return cur+1 < len(input) && input[cur+1] == assert } // RemoveComments removes both block and inline comments from a string func RemoveComments(input string) string { var ( sb = strings.Builder{} inComment = false ) for cur := 0; cur < len(input); cur++ { if input[cur] == '/' && !inComment { if peekIs(input, cur, '*') { inComment = true cur++ } else if peekIs(input, cur, '/') { // Skip until the end of line for inline comments for cur < len(input) && input[cur] != '\n' { cur++ } continue } } else if input[cur] == '*' && inComment && peekIs(input, cur, '/') { inComment = false cur++ continue } if !inComment { sb.WriteByte(input[cur]) } } if inComment { return "" } return strings.TrimSpace(sb.String()) } ================================================ FILE: internal/js_scanner/js_scanner.go ================================================ package js_scanner import ( "bytes" "fmt" "io" "strings" "github.com/iancoleman/strcase" "github.com/tdewolff/parse/v2" "github.com/tdewolff/parse/v2/js" "github.com/withastro/compiler/internal/loc" ) // FindTopLevelReturns scans JavaScript/TypeScript source code and returns the // byte positions of all `return` statements that are at the top level (i.e., not // inside any function, arrow function, method, or class method). // // This is used to transform top-level returns into throws in TSX output, because // top-level returns are valid in Astro frontmatter but cause TypeScript parsing errors.
func FindTopLevelReturns(source []byte) []int { l := js.NewLexer(parse.NewInputBytes(source)) i := 0 returns := make([]int, 0) // We need to track "function scope depth" - returns are only top-level // if they're not inside any function body. // // The challenge is distinguishing between: // - `if (cond) { return; }` - top-level return (inside if block) // - `function f() { return; }` - not top-level (inside function) // - `() => { return; }` - not top-level (inside arrow function) // - `class C { method() { return; } }` - not top-level (inside class method) // - `{ method() { return; } }` - not top-level (inside object method) // - `{ ['computed']() { return; } }` - not top-level (computed method) // // Strategy: Track when we're expecting a function body to start. // A function body starts with `{` after: // - `function` keyword followed by optional name and `()` // - `=>` (arrow function) // - `identifier()` where `{` follows (method shorthand in objects/classes) // - `[expr]()` where `{` follows (computed method in objects/classes) functionScopeStack := make([]int, 0) // stack of brace depths when entering function scopes braceDepth := 0 bracketDepth := 0 // Track parentheses depth to detect when we close params parenDepth := 0 parenDepthAtFunctionStart := -1 // the paren depth when we saw `function` keyword // Track if we're expecting a function body expectingFunctionBody := false // Track method shorthand: identifier + () + { = method shorthand // We need to track the paren depth when we see an identifier, so we know // if the identifier is BEFORE the parens (method shorthand) or INSIDE them (not method) // E.g., `method() { }` vs `if (condition) { }` identParenDepth := -1 // paren depth when we last saw an identifier at current level // Track that we actually went through parens after seeing identifier // This distinguishes `method() {` from `class Foo {` wentThroughParensForMethod := false // Track computed property: [expr] + () + { = computed method //
After we see `]` that closes a bracket at the same level, we may have a computed method sawCloseBracketForMethod := false for { token, value := l.Next() // Handle regex vs division ambiguity if token == js.DivToken || token == js.DivEqToken { if i+1 < len(source) { lns := bytes.Split(source[i+1:], []byte{'\n'}) if bytes.Contains(lns[0], []byte{'/'}) { token, value = l.RegExp() } } } if token == js.ErrorToken { if l.Err() != io.EOF { return returns } break } // Skip whitespace and comments if token == js.WhitespaceToken || token == js.LineTerminatorToken || token == js.CommentToken || token == js.CommentLineTerminatorToken { i += len(value) continue } // Track identifiers (for method shorthand pattern: identifier + () + {) // Only track if we're not already inside parens from something else if js.IsIdentifier(token) { identParenDepth = parenDepth wentThroughParensForMethod = false sawCloseBracketForMethod = false i += len(value) continue } // Track parentheses if js.IsPunctuator(token) { if value[0] == '(' { parenDepth++ i += len(value) continue } else if value[0] == ')' { parenDepth-- // If we close parens back to function start level, we expect function body next if parenDepthAtFunctionStart >= 0 && parenDepth == parenDepthAtFunctionStart { expectingFunctionBody = true parenDepthAtFunctionStart = -1 } // Check if we just closed parens back to where we saw an identifier // This means we went through `identifier()` pattern if identParenDepth >= 0 && parenDepth == identParenDepth { wentThroughParensForMethod = true } i += len(value) continue } } // Track square brackets for computed properties [expr] if js.IsPunctuator(token) { if value[0] == '[' { bracketDepth++ sawCloseBracketForMethod = false i += len(value) continue } else if value[0] == ']' { bracketDepth-- // Mark that we just closed a bracket - this could be a computed property name // The next thing should be `()` for it to be a method sawCloseBracketForMethod = true identParenDepth = -1 i += len(value)
// Arrow/function keyword handling and brace tracking below decide when a '{'
// actually opens a function body (pushing the current braceDepth onto
// functionScopeStack) versus a plain block.
continue } } // Detect arrow function: `=>` means we expect a function body if token == js.ArrowToken { expectingFunctionBody = true identParenDepth = -1 sawCloseBracketForMethod = false i += len(value) continue } // Track function keywords - after `function`, we wait for `(` then `)` if token == js.FunctionToken { parenDepthAtFunctionStart = parenDepth identParenDepth = -1 sawCloseBracketForMethod = false i += len(value) continue } // Track braces if js.IsPunctuator(token) { if value[0] == '{' { // Check if this brace opens a function body // This happens after: // 1. `function name()` or `function()` // 2. `=>` // 3. `identifier()` (method shorthand) - identifier followed by () then { // 4. `[expr]()` (computed method) - sawCloseBracketForMethod was set and we went through () isMethodShorthand := wentThroughParensForMethod isComputedMethod := sawCloseBracketForMethod if expectingFunctionBody || isMethodShorthand || isComputedMethod { // Entering a function scope functionScopeStack = append(functionScopeStack, braceDepth) expectingFunctionBody = false } identParenDepth = -1 wentThroughParensForMethod = false sawCloseBracketForMethod = false braceDepth++ i += len(value) continue } else if value[0] == '}' { braceDepth-- // Check if we're exiting a function scope if len(functionScopeStack) > 0 && braceDepth == functionScopeStack[len(functionScopeStack)-1] { functionScopeStack = functionScopeStack[:len(functionScopeStack)-1] } identParenDepth = -1 wentThroughParensForMethod = false sawCloseBracketForMethod = false i += len(value) continue } } // Reset identifier tracking on other tokens (but preserve sawCloseBracketForMethod // through parens so `[expr]()` works) identParenDepth = -1 // A return is top-level if we're not inside any function scope if token == js.ReturnToken && len(functionScopeStack) == 0 { returns = append(returns, i) } i += len(value) } return returns } type HoistedScripts struct { Hoisted [][]byte HoistedLocs []loc.Loc Body [][]byte BodyLocs
// HoistedScripts pairs hoisted snippets (exports or imports) and the remaining
// body chunks with their byte-offset start locations in the original source.
[]loc.Loc } func HoistExports(source []byte) HoistedScripts { shouldHoist := bytes.Contains(source, []byte("export")) if !shouldHoist { body := make([][]byte, 0) body = append(body, source) bodyLocs := make([]loc.Loc, 0) bodyLocs = append(bodyLocs, loc.Loc{Start: 0}) return HoistedScripts{ Body: body, BodyLocs: bodyLocs, } } l := js.NewLexer(parse.NewInputBytes(source)) i := 0 end := 0 hoisted := make([][]byte, 0) hoistedLocs := make([]loc.Loc, 0) body := make([][]byte, 0) bodyLocs := make([]loc.Loc, 0) pairs := make(map[byte]int) // Let's lex the script until we find what we need! outer: for { token, value := l.Next() if token == js.DivToken || token == js.DivEqToken { lns := bytes.Split(source[i+1:], []byte{'\n'}) if bytes.Contains(lns[0], []byte{'/'}) { token, value = l.RegExp() } } if token == js.ErrorToken { if l.Err() != io.EOF { body := make([][]byte, 0) body = append(body, source) bodyLocs := make([]loc.Loc, 0) bodyLocs = append(bodyLocs, loc.Loc{Start: 0}) return HoistedScripts{ Hoisted: hoisted, HoistedLocs: hoistedLocs, Body: body, BodyLocs: bodyLocs, } } break } // Common delimiters. Track their length, then skip.
// NOTE(review): the `source[i+1:]` slices in HoistExports' Div-token regex
// disambiguation (above and inside the export loop below) lack the
// `i+1 < len(source)` bounds guard that FindTopLevelReturns and GetPropsType
// both have — a Div token at the very end of input could panic. Worth aligning.
if token == js.WhitespaceToken || token == js.LineTerminatorToken || token == js.SemicolonToken { i += len(value) continue } // Exports should be consumed until all opening braces are closed, // a specifier is found, and a line terminator has been found if token == js.ExportToken { flags := make(map[string]bool) tokensFound := make(map[string]bool) foundIdent := false foundSemicolonOrLineTerminator := false foundBody := false start := i i += len(value) for { next, nextValue := l.Next() if next == js.DivToken || next == js.DivEqToken { lns := bytes.Split(source[i+1:], []byte{'\n'}) if bytes.Contains(lns[0], []byte{'/'}) { next, nextValue = l.RegExp() } } i += len(nextValue) flags[string(nextValue)] = true tokensFound[string(nextValue)] = true if next == js.ErrorToken && l.Err() == io.EOF { foundSemicolonOrLineTerminator = true } if js.IsIdentifier(next) { if isKeyword(nextValue) && next != js.FromToken { continue } if string(nextValue) == "type" { continue } if !foundIdent { foundIdent = true } } else if next == js.LineTerminatorToken || next == js.SemicolonToken { if next == js.LineTerminatorToken && i < len(source) && (source[i] == '&' || source[i] == '|') { continue } if (flags["function"] || flags["=>"] || flags["interface"]) && !foundBody { continue } if flags["&"] || flags["="] { continue } if pairs['('] > 0 { continue } foundSemicolonOrLineTerminator = true } else if js.IsPunctuator(next) { if nextValue[0] == '{' { if flags["function"] { // Curly braces can occur in a function parameter destructuring, which we don't want to consider foundBody = foundBody || pairs['('] == 0 } else if flags["=>"] { // Arrow can also occur in type definition before arrow function body (which we don't want to consider), but `=` cannot foundBody = foundBody || tokensFound["="] } else { foundBody = true } } if nextValue[0] == '{' || nextValue[0] == '(' || nextValue[0] == '[' { flags[string(nextValue[0])] = true pairs[nextValue[0]]++ } else if nextValue[0] == '}' { pairs['{']-- }
// An export statement is complete once an identifier was seen, a terminator
// was reached, and all brace/paren/bracket pairs are balanced; the span
// [start:i] is then hoisted and the preceding gap appended to the body.
else if nextValue[0] == ')' { pairs['(']-- } else if nextValue[0] == ']' { pairs['[']-- } } else { // Sometimes, exports are written in multiple lines, like // // export const foo = // [...] // export type Props = ThisProps & // SomeWeirdType<{ thatsSuperLong: SoItEndsUpFormattedLikeThis }> // // So, we omit the semicolon check if the line ends up with one of these if flags["&"] && nextValue[0] != '&' { flags["&"] = false } if flags["="] && nextValue[0] != '=' { flags["="] = false } } if foundIdent && foundSemicolonOrLineTerminator && pairs['{'] == 0 && pairs['('] == 0 && pairs['['] == 0 { hoisted = append(hoisted, source[start:i]) hoistedLocs = append(hoistedLocs, loc.Loc{Start: start}) if end < start { body = append(body, source[end:start]) bodyLocs = append(bodyLocs, loc.Loc{Start: end}) } end = i continue outer } if next == js.ErrorToken { if l.Err() != io.EOF { body := make([][]byte, 0) body = append(body, source) bodyLocs := make([]loc.Loc, 0) bodyLocs = append(bodyLocs, loc.Loc{Start: 0}) return HoistedScripts{ Hoisted: hoisted, HoistedLocs: hoistedLocs, Body: body, BodyLocs: bodyLocs, } } break outer } } } // Track opening and closing braces if js.IsPunctuator(token) { if value[0] == '{' || value[0] == '(' || value[0] == '[' { pairs[value[0]]++ i += len(value) continue } else if value[0] == '}' { pairs['{']-- } else if value[0] == ')' { pairs['(']-- } else if value[0] == ']' { pairs['[']-- } } // Track our current position i += len(value) } body = append(body, source[end:]) bodyLocs = append(bodyLocs, loc.Loc{Start: end}) return HoistedScripts{ Hoisted: hoisted, HoistedLocs: hoistedLocs, Body: body, BodyLocs: bodyLocs, } } func isKeyword(value []byte) bool { return js.Keywords[string(value)] != 0 } // isPropsAliasing checks if we're in a Props aliasing context (import { Props as X }) // rather than destructuring with 'as' property ({ as: Component }) func isPropsAliasing(idents []string) bool { return len(idents) > 0 && idents[len(idents)-1] == "Props" }
// HoistImports splits source into hoisted top-level import statements and the
// remaining body text, preserving byte offsets for each slice. Hoisted and
// HoistedLocs are left empty when no imports were found.
func HoistImports(source []byte) HoistedScripts {
	imports := make([][]byte, 0)
	importLocs := make([]loc.Loc, 0)
	body := make([][]byte, 0)
	bodyLocs := make([]loc.Loc, 0)
	prev := 0
	// NextImportStatement returns -1 when no further import exists; `prev`
	// tracks the end of the last import so inter-import text becomes body.
	for i, statement := NextImportStatement(source, 0); i > -1 && i < len(source)+1; i, statement = NextImportStatement(source, i) {
		bodyLocs = append(bodyLocs, loc.Loc{Start: prev})
		body = append(body, source[prev:statement.Span.Start])
		imports = append(imports, statement.Value)
		importLocs = append(importLocs, loc.Loc{Start: statement.Span.Start})
		prev = i
	}
	// No import found at all: the whole source is body.
	if prev == 0 {
		bodyLocs = append(bodyLocs, loc.Loc{Start: 0})
		body = append(body, source)
		return HoistedScripts{Body: body, BodyLocs: bodyLocs}
	}
	bodyLocs = append(bodyLocs, loc.Loc{Start: prev})
	body = append(body, source[prev:])
	return HoistedScripts{Hoisted: imports, HoistedLocs: importLocs, Body: body, BodyLocs: bodyLocs}
}

// HasGetStaticPaths reports whether source exports a `getStaticPaths`
// binding. The cheap bytes.Contains pre-check avoids running the export
// scanner on sources that cannot possibly match.
func HasGetStaticPaths(source []byte) bool {
	ident := []byte("getStaticPaths")
	if !bytes.Contains(source, ident) {
		return false
	}
	exports := HoistExports(source)
	for _, statement := range exports.Hoisted {
		if bytes.Contains(statement, ident) {
			return true
		}
	}
	return false
}

// Props describes the Props type detected in a component's frontmatter.
type Props struct {
	Ident     string // type name used for Astro.props ("Props" or the "Record" fallback)
	Statement string // trimmed generic parameter list source text, when generics were found
	Generics  string // comma-joined generic identifiers, e.g. "<T, U>"
}

// GetPropsType scans source for a `Props` interface/type declaration and
// extracts its name and any generic type parameters. When no Props type is
// present it returns the default `Record` ident with empty Statement/Generics.
func GetPropsType(source []byte) Props {
	defaultPropType := "Record"
	ident := defaultPropType
	genericsIdents := make([]string, 0)
	generics := ""
	statement := ""
	// Fast path: no "Props" substring means nothing to scan for.
	if !bytes.Contains(source, []byte("Props")) {
		return Props{
			Ident:     ident,
			Statement: statement,
			Generics:  generics,
		}
	}
	l := js.NewLexer(parse.NewInputBytes(source))
	i := 0
	pairs := make(map[byte]int)
	idents := make([]string, 0)
	start := 0
	end := 0
outer:
	for {
		token, value := l.Next()
		// Disambiguate division from a RegExp literal on the current line.
		if token == js.DivToken || token == js.DivEqToken {
			if len(source) > i {
				lns := bytes.Split(source[i+1:], []byte{'\n'})
				if bytes.Contains(lns[0], []byte{'/'}) {
					token, value = l.RegExp()
				}
			}
		}
		if token == js.ErrorToken {
			// Non-EOF lexer error: give up, returning only the ident found so far.
			if l.Err() != io.EOF {
				return Props{
					Ident: ident,
				}
			}
			break
		}
		// Common delimiters. Track their length, then skip.
		if token == js.WhitespaceToken || token == js.LineTerminatorToken || token == js.SemicolonToken {
			i += len(value)
			continue
		}
		// Record `extends` so identifiers in an extends-clause are not
		// mistaken for generic type parameters.
		if token == js.ExtendsToken {
			if bytes.Equal(value, []byte("extends")) {
				idents = append(idents, "extends")
			}
			i += len(value)
			continue
		}
		// A comma at generic-parameter depth (inside exactly one `<...>`,
		// outside any braces) separates type parameters: reset ident context.
		if pairs['{'] == 0 && pairs['('] == 0 && pairs['['] == 0 && pairs['<'] == 1 && token == js.CommaToken {
			idents = make([]string, 0)
			i += len(value)
			continue
		}
		if js.IsIdentifier(token) {
			if isKeyword(value) {
				// fix(#814): fix Props detection when using `{ Props as SomethingElse }`
				// fix(#927): only reset Props when 'as' follows 'Props' in the same context
				if ident == "Props" && string(value) == "as" && isPropsAliasing(idents) {
					start = 0
					ident = defaultPropType
					idents = make([]string, 0)
				}
				i += len(value)
				continue
			}
			// Inside one level of `<...>` (and no braces): collect generic
			// parameter names, unless we are in an extends-clause.
			if pairs['<'] == 1 && pairs['{'] == 0 {
				foundExtends := false
				for _, id := range idents {
					if id == "extends" {
						foundExtends = true
					}
				}
				if !foundExtends {
					genericsIdents = append(genericsIdents, string(value))
				}
				i += len(value)
				continue
			}
			// Note: do not check that `pairs['{'] == 0` to support named imports
			if pairs['('] == 0 && pairs['['] == 0 && string(value) == "Props" {
				ident = "Props"
			}
			idents = append(idents, string(value))
			i += len(value)
			continue
		}
		// Angle brackets: track generic nesting depth character by character.
		if bytes.ContainsAny(value, "<>") {
			// `Props<` starts the generic parameter list we want to capture.
			if len(idents) > 0 && idents[len(idents)-1] == "Props" {
				start = i
				ident = "Props"
				idents = make([]string, 0)
			}
			for _, c := range value {
				if c == '<' {
					pairs['<']++
					i += len(value)
					continue
				}
				if c == '>' {
					pairs['<']--
					if pairs['<'] == 0 {
						end = i
						// Important: only break out if we've already found `Props`!
						if ident != defaultPropType {
							break outer
						} else {
							continue
						}
					}
				}
			}
		}
		// `?` or a top-level `:` begins a constraint/annotation context; treat
		// following identifiers like an extends-clause (not generic params).
		if token == js.QuestionToken || (pairs['{'] == 0 && token == js.ColonToken) {
			idents = make([]string, 0)
			idents = append(idents, "extends")
		}
		// Track opening and closing braces
		if js.IsPunctuator(token) {
			if value[0] == '{' || value[0] == '(' || value[0] == '[' {
				idents = make([]string, 0)
				pairs[value[0]]++
				i += len(value)
				continue
			} else if value[0] == '}' {
				pairs['{']--
				// Closing the Props body with all brackets balanced ends the scan.
				if pairs['<'] == 0 && pairs['{'] == 0 && ident != defaultPropType {
					end = i
					break outer
				}
			} else if value[0] == ')' {
				pairs['(']--
			} else if value[0] == ']' {
				pairs['[']--
			}
		}
		// Track our current position
		i += len(value)
	}
	// Only emit generics when a `Props<...>` span was actually captured.
	if start > 0 && len(genericsIdents) > 0 && ident != defaultPropType {
		generics = fmt.Sprintf("<%s>", strings.Join(genericsIdents, ", "))
		statement = strings.TrimSpace(string(source[start:end]))
	}
	return Props{
		Ident:     ident,
		Statement: statement,
		Generics:  generics,
	}
}

// IsIdentifier reports whether value is a valid JS identifier, checking the
// first byte as an identifier start, middle bytes as continuations, and the
// last byte as an identifier end.
// NOTE(review): bytes are checked individually, so multi-byte (non-ASCII)
// identifier characters would be rejected — confirm ASCII-only input is intended.
func IsIdentifier(value []byte) bool {
	valid := true
	for i, b := range value {
		if i == 0 {
			valid = js.IsIdentifierStart([]byte{b})
		} else if i < len(value)-1 {
			valid = js.IsIdentifierContinue([]byte{b})
		} else {
			valid = js.IsIdentifierEnd([]byte{b})
		}
		if !valid {
			break
		}
	}
	return valid
}

// GetObjectKeys extracts the top-level keys of an object literal such as
// `{ a, b: 1, "c-d": true }`. Invalid-identifier string keys are returned as
// `"orig": camelCased` aliases. Returns no keys unless source is wrapped in braces.
func GetObjectKeys(source []byte) [][]byte {
	keys := make([][]byte, 0)
	pairs := make(map[byte]int)
	if source[0] == '{' && source[len(source)-1] == '}' {
		// Lex only the interior; `i` indexes into that interior.
		l := js.NewLexer(parse.NewInputBytes(source[1 : len(source)-1]))
		i := 0
		var prev js.TokenType
		for {
			token, value := l.Next()
			openPairs := pairs['{'] > 0 || pairs['('] > 0 || pairs['['] > 0
			// Disambiguate division from RegExp (interior index i maps to
			// source index i+1, so the slice below lines up).
			if token == js.DivToken || token == js.DivEqToken {
				lns := bytes.Split(source[i+1:], []byte{'\n'})
				if bytes.Contains(lns[0], []byte{'/'}) {
					token, value = l.RegExp()
				}
			}
			i += len(value)
			if token == js.ErrorToken {
				return keys
			}
			if js.IsPunctuator(token) {
				if value[0] == '{' || value[0] == '(' || value[0] == '[' {
					pairs[value[0]]++
					continue
				} else if value[0] == '}' {
					pairs['{']--
				} else if value[0] == ')' {
					pairs['(']--
				}
else if value[0] == ']' { pairs['[']-- } } if prev != js.ColonToken { push := func() { if token != js.StringToken { keys = append(keys, value) } else { key := value[1 : len(value)-1] ident := string(key) if !IsIdentifier(key) { ident = strcase.ToLowerCamel(string(key)) } if string(key) == ident { keys = append(keys, []byte(key)) } else { keys = append(keys, []byte(fmt.Sprintf("%s: %s", value, ident))) } } } if !openPairs && (token == js.IdentifierToken || token == js.StringToken) { push() } else if pairs['['] == 1 && token == js.StringToken { push() } } if !openPairs && token != js.WhitespaceToken { prev = token } } } return keys } type Import struct { IsType bool ExportName string LocalName string Assertions string } type ImportStatement struct { Span loc.Span Value []byte IsType bool Imports []Import Specifier string Assertions string } type ImportState uint32 const ( ImportDefault ImportState = iota ImportNamed ) func NextImportStatement(source []byte, pos int) (int, ImportStatement) { l := js.NewLexer(parse.NewInputBytes(source[pos:])) i := pos for { token, value := l.Next() if len(source) > i && token == js.DivToken || token == js.DivEqToken { lns := bytes.Split(source[i+1:], []byte{'\n'}) if bytes.Contains(lns[0], []byte{'/'}) { token, value = l.RegExp() } } if token == js.ErrorToken { // EOF or other error return -1, ImportStatement{} } // Imports should be consumed up until we find a specifier, // then we can exit after the following line terminator or semicolon if token == js.ImportToken { i += len(value) text := []byte(value) isType := false specifier := "" assertion := "" foundSpecifier := false foundAssertion := false imports := make([]Import, 0) importState := ImportDefault currImport := Import{} pairs := make(map[byte]int) for { next, nextValue := l.Next() if len(source) > i && (next == js.DivToken || next == js.DivEqToken) { lns := bytes.Split(source[i+1:], []byte{'\n'}) if bytes.Contains(lns[0], []byte{'/'}) { next, nextValue = l.RegExp() } } i += 
len(nextValue) text = append(text, nextValue...) if next == js.ErrorToken { break } if next == js.DotToken { isMeta := false for { next, _ := l.Next() if next == js.MetaToken { isMeta = true } if next != js.WhitespaceToken && next != js.MetaToken { break } } if isMeta { continue } } if !foundSpecifier && next == js.StringToken { if len(nextValue) > 1 { specifier = string(nextValue[1 : len(nextValue)-1]) foundSpecifier = true } continue } if !foundSpecifier && next == js.IdentifierToken && string(nextValue) == "type" { isType = true } if foundSpecifier && (next == js.LineTerminatorToken || next == js.SemicolonToken) && pairs['{'] == 0 && pairs['('] == 0 && pairs['['] == 0 { if currImport.ExportName != "" { if currImport.LocalName == "" { currImport.LocalName = currImport.ExportName } imports = append(imports, currImport) } return i, ImportStatement{ Span: loc.Span{Start: i - len(text), End: i}, Value: text, IsType: isType, Imports: imports, Specifier: specifier, Assertions: assertion, } } if next == js.WhitespaceToken { continue } if foundAssertion { assertion += string(nextValue) } if !foundAssertion && next == js.StringToken { specifier = string(nextValue[1 : len(nextValue)-1]) foundSpecifier = true continue } if !foundAssertion && foundSpecifier && next == js.IdentifierToken && string(nextValue) == "assert" { foundAssertion = true continue } if !foundAssertion && next == js.OpenBraceToken { importState = ImportNamed } if !foundAssertion && next == js.CommaToken { if currImport.LocalName == "" { currImport.LocalName = currImport.ExportName } imports = append(imports, currImport) currImport = Import{} } if !foundAssertion && next == js.IdentifierToken { if currImport.ExportName != "" { currImport.LocalName = string(nextValue) } else if importState == ImportNamed { currImport.ExportName = string(nextValue) } else if importState == ImportDefault { currImport.ExportName = "default" currImport.LocalName = string(nextValue) } } if !foundAssertion && next == js.MulToken 
{ currImport.ExportName = string(nextValue) } if js.IsPunctuator(next) { if nextValue[0] == '{' || nextValue[0] == '(' || nextValue[0] == '[' { pairs[nextValue[0]]++ } else if nextValue[0] == '}' { pairs['{']-- } else if nextValue[0] == ')' { pairs['(']-- } else if nextValue[0] == ']' { pairs['[']-- } } // do not hoist dynamic imports if next == js.OpenParenToken && len(specifier) == 0 { break } // do not hoist `{ import: "value" }` if next == js.ColonToken && len(specifier) == 0 { break } // if this is import.meta.*, ignore (watch for first dot) if next == js.DotToken && len(specifier) == 0 { break } } } i += len(value) } } /* Determines the export name of a component, i.e. the object path to which we can access the module, if it were imported using a dynamic import (`import()`) Returns the export name and a boolean indicating whether the component is imported AND used in the template. */ func ExtractComponentExportName(data string, imported Import) (string, bool) { namespacePrefix := fmt.Sprintf("%s.", imported.LocalName) isNamespacedComponent := strings.Contains(data, ".") && strings.HasPrefix(data, namespacePrefix) localNameEqualsData := imported.LocalName == data if isNamespacedComponent || localNameEqualsData { var exportName string switch true { case localNameEqualsData: exportName = imported.ExportName case imported.ExportName == "*": // matched a namespaced import exportName = strings.Replace(data, namespacePrefix, "", 1) case imported.ExportName == "default": // matched a default import exportName = strings.Replace(data, imported.LocalName, "default", 1) default: // matched a named import exportName = data } return exportName, true } return "", false } ================================================ FILE: internal/js_scanner/js_scanner_test.go ================================================ package js_scanner import ( "bytes" "encoding/json" "fmt" "strings" "testing" "unicode/utf8" "github.com/withastro/compiler/internal/test_utils" ) type testcase 
struct { name string source string want string only bool } // Test cases for FindTopLevelReturns func TestFindTopLevelReturns(t *testing.T) { tests := []struct { name string source string want []int only bool }{ { name: "basic top-level return", source: `return "value";`, want: []int{0}, }, { name: "return inside function declaration", source: `function foo() { return "value"; }`, want: nil, }, { name: "return inside arrow function", source: `const foo = () => { return "value"; }`, want: nil, }, { name: "return inside class method", source: `class Component { render() { return "wow"! } }`, want: nil, }, { name: "return inside exported async function", source: `export async function getStaticPaths({ paginate }: { paginate: PaginateFunction }) { const { data: products }: { data: IProduct[] } = await getEntry("products", "products"); return paginate(products, { pageSize: 10, }); }`, want: nil, }, { name: "mixed: function with return, then top-level return", source: `const foo = () => { return "value"; } if (true) { return "value"; } `, want: []int{51}, }, { name: "multiple top-level returns", source: `const foo = () => { return "value"; } if (true) { return "value"; } if (true) { return "value"; } `, want: []int{51, 83}, }, { name: "return inside object method shorthand", source: `const something = { someFunction: () => { return "Hello World"; }, someOtherFunction() { return "Hello World"; }, };`, want: nil, }, { name: "return inside arrow function with satisfies", source: `export const getStaticPaths = (({ paginate }) => { const data = [0, 1, 2]; return paginate(data, { pageSize: 10, }); }) satisfies GetStaticPaths;`, want: nil, }, { name: "top-level return with Astro.redirect", source: `if (something) { return Astro.redirect(); }`, want: []int{18}, }, { name: "no returns at all", source: `const foo = "bar"; console.log(foo);`, want: nil, }, { name: "computed method in class with generic arrow", source: `class Foo { ['get']() { return 'ok'; } } const generic = 
(value: T) => { return value; }; if (true) { return Astro.redirect('/test'); }`, want: []int{110}, }, { name: "computed method in object", source: `const obj = { ['get']() { return 'obj'; } }; if (true) { return Astro.redirect(); }`, want: []int{57}, }, { name: "generic arrow function", source: `const generic = (value: T) => { return value; }; if (true) { return Astro.redirect(); }`, want: []int{65}, }, } for _, tt := range tests { if tt.only { tests = []struct { name string source string want []int only bool }{tt} break } } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := FindTopLevelReturns([]byte(tt.source)) if diff := test_utils.ANSIDiff(fmt.Sprint(tt.want), fmt.Sprint(got)); diff != "" { t.Errorf("mismatch (-want +got):\n%s", diff) } }) } } func fixturesHoistImport() []testcase { return []testcase{ { name: "basic", source: `const value = "test"`, want: ``, }, { name: "import", source: `import { fn } from "package"; const b = await fetch();`, want: `import { fn } from "package"; `, }, { name: "dynamic", source: `const markdownDocs = await Astro.glob('../markdown/*.md') const article2 = await import('../markdown/article2.md') `, want: "", }, { name: "big import", source: `import { a, b, c, d, } from "package" const b = await fetch();`, want: `import { a, b, c, d, } from "package" `, }, { name: "import with comment", source: `// comment import { fn } from "package"; const b = await fetch();`, want: `import { fn } from "package";`, }, { name: "import assertion", source: `// comment import { fn } from "package" assert { it: 'works' }; const b = await fetch();`, want: `import { fn } from "package" assert { it: 'works' };`, }, { name: "import assertion 2", source: `// comment import { fn } from "package" assert { it: 'works' }; const b = await fetch();`, want: `import { fn } from "package" assert { it: 'works' }; `, }, { name: "import.meta.env", source: `console.log(import.meta.env.FOO); import Test from "../components/Test.astro";`, want: 
`import Test from "../components/Test.astro";`, }, { name: "import.meta.env II", source: `console.log( import .meta .env .FOO ); import Test from "../components/Test.astro";`, want: `import Test from "../components/Test.astro";`, }, { name: "import/export", source: `import { fn } from "package"; export async fn() {} const b = await fetch()`, want: `import { fn } from "package";`, }, { name: "getStaticPaths", source: `import { fn } from "package"; export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `import { fn } from "package";`, }, { name: "getStaticPaths with comments", source: `import { fn } from "package"; export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `import { fn } from "package";`, }, { name: "getStaticPaths with semicolon", source: `import { fn } from "package"; export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); }; const b = await fetch()`, want: `import { fn } from "package";`, }, { name: "getStaticPaths with RegExp escape", source: `export async function getStaticPaths() { const pattern = /\.md$/g.test('value'); } import a from "a";`, want: `import a from "a";`, }, { name: "getStaticPaths with divider", source: `export async function getStaticPaths() { const pattern = a / b; }`, want: ``, }, { name: "getStaticPaths with divider and following content", source: `export async function getStaticPaths() { const value = 1 / 2; } // comment import { b } from "b"; const { a } = Astro.props;`, want: `import { b } from "b";`, }, { name: "getStaticPaths with regex and following content", source: `export async function getStaticPaths() { const value = /2/g; } // comment import { b } from "b"; const { a } = Astro.props;`, want: `import { b } from "b";`, }, { name: "multiple imports", source: `import { a } from "a"; import { b } from "b"; // comment import { c } from "c"; const d = await 
fetch() // comment import { d } from "d";`, want: `import { a } from "a"; import { b } from "b"; import { c } from "c"; import { d } from "d"; `, }, { name: "assignment", source: `let show = true;`, want: ``, }, { name: "RegExp is not a comment", source: `import { a } from "a"; /import \{ b \} from "b";/; import { c } from "c";`, want: `import { a } from "a"; import { c } from "c"; `, }, } } func TestHoistImport(t *testing.T) { tests := fixturesHoistImport() for _, tt := range tests { if tt.only { tests = make([]testcase, 0) tests = append(tests, tt) break } } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { result := HoistImports([]byte(tt.source)) got := []byte{} for _, imp := range result.Hoisted { got = append(got, bytes.TrimSpace(imp)...) got = append(got, '\n') } // compare to expected string, show diff if mismatch if diff := test_utils.ANSIDiff(strings.TrimSpace(tt.want), strings.TrimSpace(string(got))); diff != "" { t.Errorf("mismatch (-want +got):\n%s", diff) } }) } } func FuzzHoistImport(f *testing.F) { tests := fixturesHoistImport() for _, tt := range tests { f.Add(tt.source) // Use f.Add to provide a seed corpus } f.Fuzz(func(t *testing.T, source string) { result := HoistImports([]byte(source)) got := []byte{} for _, imp := range result.Hoisted { got = append(got, bytes.TrimSpace(imp)...) 
got = append(got, '\n') } if utf8.ValidString(source) && !utf8.ValidString(string(got)) { t.Errorf("Import hoisting produced an invalid string: %q", got) } }) } func TestHoistExport(t *testing.T) { tests := []testcase{ { name: "getStaticPaths", source: `import { fn } from "package"; export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and destructured props", source: `import { fn } from "package"; export async function getStaticPaths({ paginate }) { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths({ paginate }) { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and param definition type in curly braces", source: `import { fn } from "package"; export async function getStaticPaths(input: { paginate: any }) { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths(input: { paginate: any }) { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and param definition type in square braces", source: `import { fn } from "package"; export async function getStaticPaths([{ stuff }]) { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths([{ stuff }]) { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and type specified with square braces 1", source: `import { fn } from "package"; export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export const getStaticPaths: () => { params: any }[] = () => { const content 
= Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and type specified with square braces 2", source: `import { fn } from "package"; export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and type specified with square braces 3", source: `import { fn } from "package"; export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and type specified with square braces 4", source: `import { fn } from "package"; export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export const getStaticPaths: () => { params: any }[] = () => { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with curly brace on next line and definition specified by anonymous function with destructured parameter", source: `import { fn } from "package"; export const getStaticPaths = function({ paginate }) { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export const getStaticPaths = function({ paginate }) { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with comments", source: `import { fn } from "package"; export async function getStaticPaths() { // This works! const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths() { // This works! 
const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with semicolon", source: `import { fn } from "package"; export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); }; const b = await fetch()`, want: `export async function getStaticPaths() { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "getStaticPaths with RegExp escape", source: `// cool export async function getStaticPaths() { const pattern = /\.md$/g.test('value'); } import a from "a";`, want: `export async function getStaticPaths() { const pattern = /\.md$/g.test('value'); }`, }, { name: "getStaticPaths with divider", source: `export async function getStaticPaths() { const pattern = a / b; }`, want: `export async function getStaticPaths() { const pattern = a / b; }`, }, { name: "getStaticPaths with divider and following content", source: `export async function getStaticPaths() { const value = 1 / 2; } // comment import { b } from "b"; const { a } = Astro.props;`, want: `export async function getStaticPaths() { const value = 1 / 2; }`, }, { name: "getStaticPaths with regex and following content", source: `// comment export async function getStaticPaths() { const value = /2/g; } import { b } from "b"; const { a } = Astro.props;`, want: `export async function getStaticPaths() { const value = /2/g; }`, }, { name: "getStaticPaths with TypeScript type", source: `import { fn } from "package"; export async function getStaticPaths({ paginate }: { paginate: any }) { const content = Astro.fetchContent('**/*.md'); } const b = await fetch()`, want: `export async function getStaticPaths({ paginate }: { paginate: any }) { const content = Astro.fetchContent('**/*.md'); }`, }, { name: "export interface", source: `import { a } from "a"; export interface Props { open?: boolean; }`, want: `export interface Props { open?: boolean; }`, }, { name: "export multiple", source: `import { a } from "a"; export interface Props { open?: boolean; } export const foo = 
"bar"`, want: `export interface Props { open?: boolean; } export const foo = "bar"`, }, { name: "export multiple with content after", source: `import { a } from "a"; export interface Props { open?: boolean; } export const baz = "bing" // beep boop`, want: `export interface Props { open?: boolean; } export const baz = "bing"`, }, { name: "export three", source: `import { a } from "a"; export interface Props {} export const a = "b" export const c = "d"`, want: `export interface Props {} export const a = "b" export const c = "d"`, }, { name: "export with comments", source: `import { a } from "a"; // comment export interface Props {} export const a = "b" export const c = "d"`, want: `export interface Props {} export const a = "b" export const c = "d"`, }, { name: "export local reference (runtime error)", source: `import { a } from "a"; export interface Props {} const value = await fetch("something") export const data = { value } `, want: `export interface Props {} export const data = { value }`, }, { name: "export passthrough", source: `export * from "./local-data.json"; export { default as A } from "./_types" export B from "./_types" export type C from "./_types"`, want: `export * from "./local-data.json"; export { default as A } from "./_types" export B from "./_types" export type C from "./_types"`, }, { name: "multi-line export", source: `export interface Props { foo: 'bar'; }`, want: `export interface Props { foo: 'bar'; }`, }, { name: "multi-line type export", source: `export type Props = { foo: 'bar'; }`, want: `export type Props = { foo: 'bar'; }`, }, { name: "multi-line type export with multiple exports", source: `export type Theme = 'light' | 'dark'; export type Props = { theme: Theme; }; export interface Foo { bar: string; } export type FooAndBar1 = 'Foo' & 'Bar'; export type FooAndBar2 = 'Foo' & 'Bar'; export type FooOrBar = 'Foo' | 'Bar';`, want: `export type Theme = 'light' | 'dark'; export type Props = { theme: Theme; } export interface Foo { bar: 
string; } export type FooAndBar1 = 'Foo' & 'Bar'; export type FooAndBar2 = 'Foo' & 'Bar'; export type FooOrBar = 'Foo' | 'Bar';`, }, { name: "Picture", source: `// @ts-ignore import loader from 'virtual:image-loader'; import { getPicture } from '../src/get-picture.js'; import type { ImageAttributes, ImageMetadata, OutputFormat, PictureAttributes, TransformOptions } from '../src/types.js'; export interface LocalImageProps extends Omit, Omit, Omit { src: ImageMetadata | Promise<{ default: ImageMetadata }>; sizes: HTMLImageElement['sizes']; widths: number[]; formats?: OutputFormat[]; } export interface RemoteImageProps extends Omit, TransformOptions, Omit { src: string; sizes: HTMLImageElement['sizes']; widths: number[]; aspectRatio: TransformOptions['aspectRatio']; formats?: OutputFormat[]; } export type Props = LocalImageProps | RemoteImageProps; const { src, sizes, widths, aspectRatio, formats = ['avif', 'webp'], loading = 'lazy', decoding = 'async', ...attrs } = Astro.props as Props; const { image, sources } = await getPicture({ loader, src, widths, formats, aspectRatio }); `, want: `export interface LocalImageProps extends Omit, Omit, Omit { src: ImageMetadata | Promise<{ default: ImageMetadata }>; sizes: HTMLImageElement['sizes']; widths: number[]; formats?: OutputFormat[]; } export interface RemoteImageProps extends Omit, TransformOptions, Omit { src: string; sizes: HTMLImageElement['sizes']; widths: number[]; aspectRatio: TransformOptions['aspectRatio']; formats?: OutputFormat[]; } export type Props = LocalImageProps | RemoteImageProps;`, }, { name: "Image", source: `// @ts-ignore import loader from 'virtual:image-loader'; import { getImage } from '../src/index.js'; import type { ImageAttributes, ImageMetadata, TransformOptions, OutputFormat } from '../src/types.js'; const { loading = "lazy", decoding = "async", ...props } = Astro.props as Props; const attrs = await getImage(loader, props); // Moved after Astro.props for test export interface LocalImageProps 
extends Omit, Omit { src: ImageMetadata | Promise<{ default: ImageMetadata }>; } export interface RemoteImageProps extends TransformOptions, ImageAttributes { src: string; format: OutputFormat; width: number; height: number; } export type Props = LocalImageProps | RemoteImageProps; `, want: `export interface LocalImageProps extends Omit, Omit { src: ImageMetadata | Promise<{ default: ImageMetadata }>; } export interface RemoteImageProps extends TransformOptions, ImageAttributes { src: string; format: OutputFormat; width: number; height: number; } export type Props = LocalImageProps | RemoteImageProps;`, }, { name: "comments", source: `// export const foo = 0 /* */`, want: `export const foo = 0`, }, } for _, tt := range tests { if tt.only { tests = make([]testcase, 0) tests = append(tests, tt) break } } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { result := HoistExports([]byte(tt.source)) got := []byte{} for _, imp := range result.Hoisted { got = append(got, bytes.TrimSpace(imp)...) 
got = append(got, '\n') } // compare to expected string, show diff if mismatch if diff := test_utils.ANSIDiff(strings.TrimSpace(tt.want), strings.TrimSpace(string(got))); diff != "" { t.Errorf("mismatch (-want +got):\n%s", diff) } }) } } type keytestcase struct { name string source string want []string only bool } func TestGetObjectKeys(t *testing.T) { tests := []keytestcase{ { name: "basic", source: `{ value }`, want: []string{"value"}, }, { name: "shorhand", source: `{ value, foo, bar, baz, bing }`, want: []string{"value", "foo", "bar", "baz", "bing"}, }, { name: "literal", source: `{ value: 0 }`, want: []string{"value"}, }, { name: "multiple", source: `{ a: 0, b: 1, c: 2 }`, want: []string{"a", "b", "c"}, }, { name: "objects", source: `{ a: { a1: 0 }, b: { b1: { b2: 0 }}, c: { c1: { c2: { c3: 0 }}} }`, want: []string{"a", "b", "c"}, }, { name: "regexp", source: `{ a: /hello/g, b: 0 }`, want: []string{"a", "b"}, }, { name: "array", source: `{ a: [0, 1, 2], b: ["one", "two", "three"], c: 0 }`, want: []string{"a", "b", "c"}, }, { name: "valid strings", source: `{ "lowercase": true, "camelCase": true, "PascalCase": true, "snake_case": true, "__private": true, ["computed"]: true, }`, // Note that quotes are dropped want: []string{`lowercase`, `camelCase`, `PascalCase`, `snake_case`, `__private`, `computed`}, }, { name: "invalid strings", source: `{ "dash-case": true, "with.dot": true, "with space": true }`, want: []string{`"dash-case": dashCase`, `"with.dot": withDot`, `"with space": withSpace`}, }, } for _, tt := range tests { if tt.only { tests = make([]keytestcase, 0) tests = append(tests, tt) break } } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { keys := GetObjectKeys([]byte(tt.source)) output := make([]string, 0) for _, key := range keys { output = append(output, string(key)) } got, _ := json.Marshal(output) want, _ := json.Marshal(tt.want) // compare to expected string, show diff if mismatch if diff := test_utils.ANSIDiff(string(want), 
string(got)); diff != "" { t.Errorf("mismatch (-want +got):\n%s", diff) } }) } } // propsTestCase represents a test case for GetPropsType type propsTestCase struct { name string source string want Props } // makeProps is a helper to create Props structs concisely func makeProps(ident string, statement string, generics string) Props { return Props{ Ident: ident, Statement: statement, Generics: generics, } } // getPropsTypeTestCases returns all test cases for GetPropsType func getPropsTypeTestCases() []propsTestCase { const defaultType = "Record" return []propsTestCase{ // Basic cases { name: "no props", source: `const foo = "bar"`, want: makeProps(defaultType, "", ""), }, { name: "interface Props", source: `interface Props { foo: string; }`, want: makeProps("Props", "", ""), }, { name: "type Props", source: `type Props = { foo: string; }`, want: makeProps("Props", "", ""), }, // Generics { name: "Props with generics", source: `interface Props { foo: T; }`, want: makeProps("Props", "", ""), }, // Issue #927: 'as' prop name handling { name: "destructuring with 'as' prop name without type assertion - issue #927", source: `interface Props { as?: string; href?: string; } const { as: Component, href } = Astro.props;`, want: makeProps("Props", "", ""), }, { name: "destructuring with 'as' prop name with type assertion", source: `interface Props { as?: string; href?: string; } const { as: Component, href } = Astro.props as Props;`, want: makeProps("Props", "", ""), }, } } // checks if two Props are equal and reports errors func assertPropsEqual(t *testing.T, got, want Props, source string) { t.Helper() if got.Ident != want.Ident { t.Errorf("Ident mismatch:\n got: %q\n want: %q", got.Ident, want.Ident) t.Logf("Source:\n%s", source) } if got.Statement != want.Statement { t.Errorf("Statement mismatch:\n got: %q\n want: %q", got.Statement, want.Statement) } if got.Generics != want.Generics { t.Errorf("Generics mismatch:\n got: %q\n want: %q", got.Generics, want.Generics) } } 
func TestGetPropsType(t *testing.T) { tests := getPropsTypeTestCases() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := GetPropsType([]byte(tt.source)) assertPropsEqual(t, got, tt.want, tt.source) }) } } ================================================ FILE: internal/js_scanner/testdata/fuzz/FuzzHoistImport/ec55358ab2929fbf4deab52587664e42682f0a6ea201a325c5c33f9d18c50456 ================================================ go test fuzz v1 string("import\"\nimport \"\";") ================================================ FILE: internal/loc/diagnostics.go ================================================ package loc type DiagnosticCode int const ( ERROR DiagnosticCode = 1000 ERROR_UNTERMINATED_JS_COMMENT DiagnosticCode = 1001 ERROR_FRAGMENT_SHORTHAND_ATTRS DiagnosticCode = 1002 ERROR_UNMATCHED_IMPORT DiagnosticCode = 1003 ERROR_UNSUPPORTED_SLOT_ATTRIBUTE DiagnosticCode = 1004 ERROR_UNTERMINATED_STRING DiagnosticCode = 1005 ERROR_MISSING_FRONTMATTER_FENCE DiagnosticCode = 1006 WARNING DiagnosticCode = 2000 WARNING_UNTERMINATED_HTML_COMMENT DiagnosticCode = 2001 WARNING_UNCLOSED_HTML_TAG DiagnosticCode = 2002 WARNING_DEPRECATED_DIRECTIVE DiagnosticCode = 2003 WARNING_IGNORED_DIRECTIVE DiagnosticCode = 2004 WARNING_UNSUPPORTED_EXPRESSION DiagnosticCode = 2005 WARNING_SET_WITH_CHILDREN DiagnosticCode = 2006 WARNING_CANNOT_DEFINE_VARS DiagnosticCode = 2007 WARNING_INVALID_SPREAD DiagnosticCode = 2008 WARNING_UNEXPECTED_CHARACTER DiagnosticCode = 2009 WARNING_CANNOT_RERUN DiagnosticCode = 2010 INFO DiagnosticCode = 3000 HINT DiagnosticCode = 4000 ) ================================================ FILE: internal/loc/loc.go ================================================ package loc type Loc struct { // This is the 0-based index of this location from the start of the file, in bytes Start int } type Range struct { Loc Loc Len int } func (r Range) End() int { return r.Loc.Start + r.Len } // span is a range of bytes in a Tokenizer's buffer. 
The start is inclusive, // the end is exclusive. type Span struct { Start, End int } type TSXRange struct { Start int `js:"start"` End int `js:"end"` } // A NodeType is the type of a Node. type DiagnosticSeverity int const ( ErrorType DiagnosticSeverity = 1 WarningType DiagnosticSeverity = 2 InformationType DiagnosticSeverity = 3 HintType DiagnosticSeverity = 4 ) type DiagnosticMessage struct { Severity int `js:"severity"` Code int `js:"code"` Location *DiagnosticLocation `js:"location"` Hint string `js:"hint"` Text string `js:"text"` } type DiagnosticLocation struct { File string `js:"file"` Line int `js:"line"` Column int `js:"column"` Length int `js:"length"` } type ErrorWithRange struct { Code DiagnosticCode Text string Hint string Range Range } func (e *ErrorWithRange) Error() string { return e.Text } func (e *ErrorWithRange) ToMessage(location *DiagnosticLocation) DiagnosticMessage { return DiagnosticMessage{ Code: int(e.Code), Text: e.Error(), Hint: e.Hint, Location: location, } } ================================================ FILE: internal/node.go ================================================ // Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package astro import ( "github.com/withastro/compiler/internal/loc" "golang.org/x/net/html/atom" ) // A NodeType is the type of a Node. type NodeType uint32 const ( ErrorNode NodeType = iota TextNode DocumentNode ElementNode CommentNode DoctypeNode // RawNode nodes are not returned by the parser, but can be part of the // Node tree passed to func Render to insert raw HTML (without escaping). // If so, this package makes no guarantee that the rendered HTML is secure // (from e.g. Cross Site Scripting attacks) or well-formed. 
RawNode scopeMarkerNode // Extensions FrontmatterNode ExpressionNode RenderHeadNode ) func (t NodeType) String() string { switch t { case ErrorNode: return "error" case TextNode: return "text" case DocumentNode: return "root" case ElementNode: return "element" case CommentNode: return "comment" case DoctypeNode: return "doctype" case FrontmatterNode: return "frontmatter" case ExpressionNode: return "expression" default: return "" } } // Used as an Attribute Key to mark implicit nodes const ImplicitNodeMarker = "\x00implicit" // Section 12.2.4.3 says "The markers are inserted when entering applet, // object, marquee, template, td, th, and caption elements, and are used // to prevent formatting from "leaking" into applet, object, marquee, // template, td, th, and caption elements". var scopeMarker = Node{Type: scopeMarkerNode} type HydratedComponentMetadata struct { ExportName string LocalName string Specifier string ResolvedPath string } // A Node consists of a NodeType and some Data (tag name for element nodes, // content for text) and are part of a tree of Nodes. Element nodes may also // have a Namespace and contain a slice of Attributes. Data is unescaped, so // that it looks like "a 0 { n := (*s)[i-1] *s = (*s)[:i-1] return n } return nil } // top returns the most recently pushed node, or nil if s is empty. func (s *nodeStack) top() *Node { if i := len(*s); i > 0 { return (*s)[i-1] } return nil } // index returns the index of the top-most occurrence of n in the stack, or -1 // if n is not present. func (s *nodeStack) index(n *Node) int { for i := len(*s) - 1; i >= 0; i-- { if (*s)[i] == n { return i } } return -1 } // contains returns whether a is within s. func (s *nodeStack) contains(a atom.Atom) bool { for _, n := range *s { if n.DataAtom == a && n.Namespace == "" { return true } } return false } // insert inserts a node at the given index. 
func (s *nodeStack) insert(i int, n *Node) { (*s) = append(*s, nil) copy((*s)[i+1:], (*s)[i:]) (*s)[i] = n } // remove removes a node from the stack. It is a no-op if n is not present. func (s *nodeStack) remove(n *Node) { i := s.index(n) if i == -1 { return } copy((*s)[i:], (*s)[i+1:]) j := len(*s) - 1 (*s)[j] = nil *s = (*s)[:j] } type insertionModeStack []insertionMode func (s *insertionModeStack) pop() (im insertionMode) { i := len(*s) im = (*s)[i-1] *s = (*s)[:i-1] return im } func (s *insertionModeStack) top() insertionMode { if i := len(*s); i > 0 { return (*s)[i-1] } return nil } ================================================ FILE: internal/parser.go ================================================ // Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package astro import ( "errors" "fmt" "io" "strings" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/loc" a "golang.org/x/net/html/atom" ) // A parser implements the HTML5 parsing algorithm: // https://html.spec.whatwg.org/multipage/syntax.html#tree-construction type parser struct { // tokenizer provides the tokens for the parser. tokenizer *Tokenizer // tok is the most recently read token. tok Token ltok Token // Self-closing tags like


are treated as start tags, except that // hasSelfClosingToken is set while they are being processed. hasSelfClosingToken bool // flag to signal that frontmatter has been added // if we don't have frontmatter and we enter a tag, we add empty frontmatter frontmatterState FrontmatterState // doc is the document root element. doc *Node // fm is the document's frontmatter node fm *Node // The stack of open elements (section 12.2.4.2) and active formatting // elements (section 12.2.4.3). oe, afe nodeStack // Element pointers (section 12.2.4.4). head, form *Node // Other parsing state flags (section 12.2.4.5). scripting, framesetOK bool // The stack of template insertion modes templateStack insertionModeStack // im is the current insertion mode. im insertionMode // originalIM is the insertion mode to go back to after completing a text // or inTableText insertion mode. originalIM insertionMode exitLiteralIM func() bool // fosterParenting is whether new elements should be inserted according to // the foster parenting rules (section 12.2.6.1). fosterParenting bool // quirks is whether the parser is operating in "quirks mode." quirks bool // fragment is whether the parser is parsing an HTML fragment. fragment bool // literal is whether the parser should handle exceptions literally. literal bool // context is the context element when parsing an HTML fragment // (section 12.4). context *Node handler *handler.Handler } func (p *parser) top() *Node { if n := p.oe.top(); n != nil { return n } return p.doc } // Stop tags for use in popUntil. These come from section 12.2.4.2. 
// Stop-tag tables keyed by namespace ("" = HTML, plus "math" and "svg").
// Reaching one of these elements while scanning the stack of open elements
// terminates a default/listItem/button scope search (section 12.2.4.2).
var (
	defaultScopeStopTags = map[string][]a.Atom{
		"":     {a.Applet, a.Caption, a.Html, a.Table, a.Td, a.Th, a.Marquee, a.Object, a.Template},
		"math": {a.AnnotationXml, a.Mi, a.Mn, a.Mo, a.Ms, a.Mtext},
		"svg":  {a.Desc, a.ForeignObject, a.Title},
	}
)

// scope identifies which "has an element in X scope" variant (section
// 12.2.4.2) a stack search should use.
type scope int

const (
	defaultScope scope = iota
	listItemScope
	buttonScope
	tableScope
	tableRowScope
	tableBodyScope
	selectScope
)

// popUntil pops the stack of open elements at the highest element whose tag
// is in matchTags, provided there is no higher element in the scope's stop
// tags (as defined in section 12.2.4.2). It returns whether or not there was
// such an element. If there was not, popUntil leaves the stack unchanged.
//
// For example, the set of stop tags for table scope is: "html", "table". If
// the stack was:
// ["html", "body", "font", "table", "b", "i", "u"]
// then popUntil(tableScope, "font") would return false, but
// popUntil(tableScope, "i") would return true and the stack would become:
// ["html", "body", "font", "table", "b"]
//
// If an element's tag is in both the stop tags and matchTags, then the stack
// will be popped and the function returns true (provided, of course, there was
// no higher element in the stack that was also in the stop tags). For example,
// popUntil(tableScope, "table") returns true and leaves:
// ["html", "body", "font"]
func (p *parser) popUntil(s scope, matchTags ...a.Atom) bool {
	if i := p.indexOfElementInScope(s, matchTags...); i != -1 {
		// Truncate the stack just below the matched element, popping it
		// and everything above it.
		p.oe = p.oe[:i]
		return true
	}
	return false
}

// indexOfElementInScope returns the index in p.oe of the highest element whose
// tag is in matchTags that is in scope. If no matching element is in scope, it
// returns -1.
func (p *parser) indexOfElementInScope(s scope, matchTags ...a.Atom) int {
	// Walk the stack of open elements from the top (innermost) down.
	for i := len(p.oe) - 1; i >= 0; i-- {
		tagAtom := p.oe[i].DataAtom
		if p.oe[i].Namespace == "" {
			// A direct match wins before any stop-tag check at this node.
			for _, t := range matchTags {
				if t == tagAtom {
					return i
				}
			}
			// Scope-specific stop tags: hitting one means the search fails.
			switch s {
			case defaultScope:
				// No-op.
			case listItemScope:
				if tagAtom == a.Ol || tagAtom == a.Ul {
					return -1
				}
			case buttonScope:
				if tagAtom == a.Button {
					return -1
				}
			case tableScope:
				if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template {
					return -1
				}
			case selectScope:
				// Select scope is inverted: everything except optgroup and
				// option is a stop tag.
				if tagAtom != a.Optgroup && tagAtom != a.Option {
					return -1
				}
			default:
				panic("unreachable")
			}
		}
		// The default/listItem/button scopes additionally stop at the
		// namespace-dependent tags in defaultScopeStopTags (checked for
		// foreign-namespace elements too, hence outside the block above).
		switch s {
		case defaultScope, listItemScope, buttonScope:
			for _, t := range defaultScopeStopTags[p.oe[i].Namespace] {
				if t == tagAtom {
					return -1
				}
			}
		}
	}
	return -1
}

// elementInScope is like popUntil, except that it doesn't modify the stack of
// open elements.
func (p *parser) elementInScope(s scope, matchTags ...a.Atom) bool {
	return p.indexOfElementInScope(s, matchTags...) != -1
}

// clearStackToContext pops elements off the stack of open elements until a
// scope-defined element is found.
func (p *parser) clearStackToContext(s scope) {
	for i := len(p.oe) - 1; i >= 0; i-- {
		tagAtom := p.oe[i].DataAtom
		switch s {
		case tableScope:
			if tagAtom == a.Html || tagAtom == a.Table || tagAtom == a.Template {
				// Keep the context element itself; drop everything above it.
				p.oe = p.oe[:i+1]
				return
			}
		case tableRowScope:
			if tagAtom == a.Html || tagAtom == a.Tr || tagAtom == a.Template {
				p.oe = p.oe[:i+1]
				return
			}
		case tableBodyScope:
			if tagAtom == a.Html || tagAtom == a.Tbody || tagAtom == a.Tfoot || tagAtom == a.Thead || tagAtom == a.Template {
				p.oe = p.oe[:i+1]
				return
			}
		default:
			// Only the three table-related scopes are valid here.
			panic("unreachable")
		}
	}
}

// parseGenericRawTextElements implements the generic raw text element parsing
// algorithm defined in 12.2.6.2.
// https://html.spec.whatwg.org/multipage/parsing.html#parsing-elements-that-contain-only-text
// TODO: Since both RAWTEXT and RCDATA states are treated as tokenizer's part
// officially, need to make tokenizer consider both states.
func (p *parser) parseGenericRawTextElement() { p.addElement() p.originalIM = p.im p.im = textIM } func (p *parser) generateLoc() []loc.Loc { locs := make([]loc.Loc, 0, 2) locs = append(locs, p.tok.Loc) switch p.tok.Type { case TextToken: locs = append(locs, loc.Loc{Start: p.tok.Loc.Start + len(p.tok.Data)}) case CommentToken: locs = append(locs, loc.Loc{Start: p.tok.Loc.Start + len(p.tok.Data) + 3}) } return locs } func (p *parser) addLoc() { n := p.oe.top() if n != nil { n.Loc = append(n.Loc, p.tok.Loc) } } // generateImpliedEndTags pops nodes off the stack of open elements as long as // the top node has a tag name of dd, dt, li, optgroup, option, p, rb, rp, rt or rtc. // If exceptions are specified, nodes with that name will not be popped off. func (p *parser) generateImpliedEndTags(exceptions ...string) { var i int loop: for i = len(p.oe) - 1; i >= 0; i-- { n := p.oe[i] if n.Type != ElementNode { break } switch n.DataAtom { case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc: for _, except := range exceptions { if n.Data == except { break loop } } continue } break } p.oe = p.oe[:i+1] } // addChild adds a child node n to the top element, and pushes n onto the stack // of open elements if it is an element node. func (p *parser) addChild(n *Node) { if p.shouldFosterParent() { p.fosterParent(n) } else { p.top().AppendChild(n) } if n.Type == ElementNode { p.oe = append(p.oe, n) } } // shouldFosterParent returns whether the next node to be added should be // foster parented. func (p *parser) shouldFosterParent() bool { if p.fosterParenting { switch p.top().DataAtom { case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr: return true } } return false } // fosterParent adds a child node according to the foster parenting rules. // Section 12.2.6.1, "foster parenting". 
// fosterParent inserts n according to the HTML5 foster-parenting rules
// (section 12.2.6.1): content that would land inside a table is relocated to
// just before the table (or into an open template).
func (p *parser) fosterParent(n *Node) {
	var table, parent, prev, template *Node
	// i is the stack index of the deepest open <table>, or -1 if none.
	var i int
	for i = len(p.oe) - 1; i >= 0; i-- {
		if p.oe[i].DataAtom == a.Table {
			table = p.oe[i]
			break
		}
	}
	// j is the stack index of the deepest open <template>, or -1 if none.
	var j int
	for j = len(p.oe) - 1; j >= 0; j-- {
		if p.oe[j].DataAtom == a.Template {
			template = p.oe[j]
			break
		}
	}
	// A template that is more deeply nested than the table (or with no table
	// at all) captures the node directly.
	if template != nil && (table == nil || j > i) {
		template.AppendChild(n)
		return
	}
	if table == nil {
		// The foster parent is the html element.
		parent = p.oe[0]
	} else {
		parent = table.Parent
	}
	if parent == nil {
		// The table has no parent in the tree; fall back to the element
		// directly below it on the stack of open elements.
		parent = p.oe[i-1]
	}
	if table != nil {
		prev = table.PrevSibling
	} else {
		prev = parent.LastChild
	}
	// Merge adjacent text nodes instead of creating a new sibling.
	if prev != nil && prev.Type == TextNode && n.Type == TextNode {
		prev.Data += n.Data
		return
	}
	// Insert immediately before the table (or append, when table is nil).
	parent.InsertBefore(n, table)
}

// addText adds text to the preceding node if it is a text node, or else it
// calls addChild with a new text node.
func (p *parser) addText(text string) {
	if text == "" {
		return
	}
	if p.shouldFosterParent() {
		p.fosterParent(&Node{
			Type: TextNode,
			Data: text,
			Loc:  p.generateLoc(),
		})
		return
	}
	t := p.top()
	// Coalesce with an existing trailing text node.
	if n := t.LastChild; n != nil && n.Type == TextNode {
		n.Data += text
		return
	}
	p.addChild(&Node{
		Type: TextNode,
		Data: text,
		Loc:  p.generateLoc(),
	})
}

// addFrontmatter creates the document's frontmatter node (Astro's leading
// `---` block) exactly once, inserting it as the document's first child.
// When empty is true the node is closed immediately and tagged with the
// implicit marker; otherwise it is left open on the stack so subsequent
// tokens become its children.
func (p *parser) addFrontmatter(empty bool) {
	if p.frontmatterState == FrontmatterInitial {
		if p.doc.FirstChild != nil {
			p.fm = &Node{
				Type: FrontmatterNode,
				Loc:  p.generateLoc(),
			}
			// Frontmatter always precedes any existing document content.
			p.doc.InsertBefore(p.fm, p.doc.FirstChild)
		} else {
			p.fm = &Node{
				Type: FrontmatterNode,
				Loc:  p.generateLoc(),
			}
			p.doc.AppendChild(p.fm)
		}
		if empty {
			// No frontmatter content: mark it implicit and closed.
			p.frontmatterState = FrontmatterClosed
			p.fm.Attr = append(p.fm.Attr, Attribute{Key: ImplicitNodeMarker, Type: EmptyAttribute})
		} else {
			p.frontmatterState = FrontmatterOpen
			p.oe = append(p.oe, p.fm)
		}
	}
}

// addExpression adds a child expression based on the current token.
func (p *parser) addExpression() { p.addChild(&Node{ Type: ElementNode, DataAtom: a.Template, Data: "astro:expression", Attr: make([]Attribute, 0), Expression: true, Component: false, CustomElement: false, HandledScript: false, Loc: p.generateLoc(), Namespace: p.top().Namespace, }) } func isFragment(data string) bool { return len(data) == 0 || data == "Fragment" } func isSlot(data string) bool { return data == "slot" } func isComponent(data string) bool { if strings.Contains(data, ".") { return true } return !isFragment(data) && data[0] >= 'A' && data[0] <= 'Z' } func isCustomElement(data string) bool { return strings.Contains(data, "-") } func (p *parser) isInsideHead() bool { n := p.oe.top() for n != nil { if n.DataAtom == a.Head { return true } n = n.Parent } return false } // addElement adds a child element based on the current token. func (p *parser) addElement() { p.addChild(&Node{ Type: ElementNode, DataAtom: p.tok.DataAtom, Data: p.tok.Data, Attr: p.tok.Attr, Fragment: isFragment(p.tok.Data), Component: isComponent(p.tok.Data), CustomElement: isCustomElement(p.tok.Data), HandledScript: false, Loc: p.generateLoc(), }) } // Section 12.2.4.3. func (p *parser) addFormattingElement() { tagAtom, attr := p.tok.DataAtom, p.tok.Attr p.addElement() // Implement the Noah's Ark clause, but with three per family instead of two. identicalElements := 0 findIdenticalElements: for i := len(p.afe) - 1; i >= 0; i-- { n := p.afe[i] if n.Type == scopeMarkerNode { break } if n.Type != ElementNode { continue } if n.Namespace != "" { continue } if n.DataAtom != tagAtom { continue } if len(n.Attr) != len(attr) { continue } compareAttributes: for _, t0 := range n.Attr { for _, t1 := range attr { if t0.Key == t1.Key && t0.Namespace == t1.Namespace && t0.Val == t1.Val { // Found a match for this attribute, continue with the next attribute. continue compareAttributes } } // If we get here, there is no attribute that matches a. // Therefore the element is not identical to the new one. 
continue findIdenticalElements } identicalElements++ if identicalElements >= 3 { p.afe.remove(n) } } p.afe = append(p.afe, p.top()) } // Section 12.2.4.3. func (p *parser) clearActiveFormattingElements() { for { if n := p.afe.pop(); len(p.afe) == 0 || n.Type == scopeMarkerNode { return } } } // Section 12.2.4.3. func (p *parser) reconstructActiveFormattingElements() { n := p.afe.top() if n == nil { return } if n.Type == scopeMarkerNode || p.oe.index(n) != -1 { return } i := len(p.afe) - 1 for n.Type != scopeMarkerNode && p.oe.index(n) == -1 { if i == 0 { i = -1 break } i-- n = p.afe[i] } for { i++ clone := p.afe[i].clone() p.addChild(clone) p.afe[i] = clone if i == len(p.afe)-1 { break } } } // Section 12.2.5. func (p *parser) acknowledgeSelfClosingTag() { p.hasSelfClosingToken = false } // An insertion mode (section 12.2.4.1) is the state transition function from // a particular state in the HTML5 parser's state machine. It updates the // parser's fields depending on parser.tok (where ErrorToken means EOF). // It returns whether the token was consumed. type insertionMode func(*parser) bool // setOriginalIM sets the insertion mode to return to after completing a text or // inTableText insertion mode. // Section 12.2.4.1, "using the rules for". func (p *parser) setOriginalIM() { if p.originalIM != nil { panic("html: bad parser state: originalIM was set twice") } p.originalIM = p.im } // Section 12.2.4.1, "reset the insertion mode". func (p *parser) resetInsertionMode() { for i := len(p.oe) - 1; i >= 0; i-- { n := p.oe[i] last := i == 0 if last && p.context != nil { n = p.context } switch n.DataAtom { case a.Select: if !last { for ancestor, first := n, p.oe[0]; ancestor != first; { ancestor = p.oe[p.oe.index(ancestor)-1] switch ancestor.DataAtom { case a.Template: p.im = inSelectIM return case a.Table: p.im = inSelectInTableIM return } } } p.im = inSelectIM case a.Td, a.Th: // TODO: remove this divergence from the HTML5 spec. 
// // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 p.im = inCellIM case a.Tr: p.im = inRowIM case a.Tbody, a.Thead, a.Tfoot: p.im = inTableBodyIM case a.Caption: p.im = inCaptionIM case a.Colgroup: p.im = inColumnGroupIM case a.Table: p.im = inTableIM case a.Template: // TODO: remove this divergence from the HTML5 spec. if n.Namespace != "" { continue } p.im = p.templateStack.top() case a.Head: // TODO: remove this divergence from the HTML5 spec. // // See https://bugs.chromium.org/p/chromium/issues/detail?id=829668 p.im = inHeadIM case a.Body: p.im = inBodyIM case a.Frameset: p.im = inFramesetIM case a.Html: if p.head == nil { p.im = beforeHeadIM } else { p.im = afterHeadIM } default: if last { p.im = inBodyIM return } continue } return } } const whitespace = " \t\r\n\f" // Section 12.2.6.4.1. func initialIM(p *parser) bool { switch p.tok.Type { case FrontmatterFenceToken: p.setOriginalIM() p.im = frontmatterIM return false case TextToken: p.tok.Data = strings.TrimLeft(p.tok.Data, whitespace) if len(p.tok.Data) == 0 { // It was all whitespace, so ignore it. return true } case CommentToken: p.doc.AppendChild(&Node{ Type: CommentNode, Data: p.tok.Data, Loc: p.generateLoc(), }) return true case DoctypeToken: n, quirks := parseDoctype(p.tok.Data) p.doc.AppendChild(n) p.quirks = quirks p.im = beforeHTMLIM return true } if p.frontmatterState == FrontmatterInitial { p.addFrontmatter(true) } p.quirks = true p.im = beforeHTMLIM return false } // Section 12.2.6.4.2. func beforeHTMLIM(p *parser) bool { switch p.tok.Type { case DoctypeToken: // Ignore the token. return true case TextToken: p.tok.Data = strings.TrimLeft(p.tok.Data, whitespace) if len(p.tok.Data) == 0 { // It was all whitespace, so ignore it. 
return true } case StartTagToken: switch p.tok.DataAtom { case a.Html: p.addElement() p.im = beforeHeadIM return true case a.Script: p.addElement() if p.originalIM == nil { p.setOriginalIM() } p.im = textIM if p.hasSelfClosingToken { p.addLoc() p.oe.pop() p.acknowledgeSelfClosingTag() } return true } if isComponent(p.tok.Data) { p.addElement() p.im = inBodyIM if p.hasSelfClosingToken { p.oe.pop() p.acknowledgeSelfClosingTag() } return true } if p.literal { p.im = inLiteralIM p.originalIM = beforeHTMLIM return false } case EndTagToken: switch p.tok.DataAtom { case a.Script: p.oe.pop() return true case a.Head, a.Body, a.Html, a.Br: p.parseImpliedToken(StartTagToken, a.Html, a.Html.String()) return false default: // Ignore the token. return true } case CommentToken: p.doc.AppendChild(&Node{ Type: CommentNode, Data: p.tok.Data, Loc: p.generateLoc(), }) return true } p.parseImpliedToken(StartTagToken, a.Html, a.Html.String()) return false } // Section 12.2.6.4.3. func beforeHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: p.addText(p.tok.Data) return true case StartTagToken: switch p.tok.DataAtom { case a.Slot: p.addElement() if p.hasSelfClosingToken { p.addLoc() p.oe.pop() p.acknowledgeSelfClosingTag() } return true case a.Head: p.addElement() p.head = p.top() p.im = inHeadIM return true case a.Html: return inBodyIM(p) } case EndTagToken: switch p.tok.DataAtom { case a.Head, a.Body, a.Html, a.Br: p.parseImpliedToken(StartTagToken, a.Head, a.Head.String()) return false default: // Ignore the token. return true } case CommentToken: p.addChild(&Node{ Type: CommentNode, Data: p.tok.Data, Loc: p.generateLoc(), }) return true case DoctypeToken: // Ignore the token. 
return true case StartExpressionToken: p.parseImpliedToken(StartTagToken, a.Head, a.Head.String()) p.addExpression() p.afe = append(p.afe, &scopeMarker) p.setOriginalIM() p.im = inExpressionIM return true } p.parseImpliedToken(StartTagToken, a.Head, a.Head.String()) return false } // Section 12.2.6.4.4. func inHeadIM(p *parser) bool { switch p.tok.Type { case TextToken: s := strings.TrimLeft(p.tok.Data, whitespace) if len(s) < len(p.tok.Data) { // Add the initial whitespace to the current node. p.addText(p.tok.Data[:len(p.tok.Data)-len(s)]) if s == "" { return true } p.tok.Data = s return textIM(p) } else if p.oe.top() != nil && (isComponent(p.oe.top().Data) || isFragment((p.oe.top().Data))) { p.addText(p.tok.Data) return true } case StartTagToken: // Allow components in Head if isComponent(p.tok.Data) || isFragment(p.tok.Data) { p.im = inLiteralIM p.originalIM = inHeadIM p.exitLiteralIM = getExitLiteralFunc(p) return false } switch p.tok.DataAtom { case a.Html: return inBodyIM(p) case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta: p.addElement() p.oe.pop() p.acknowledgeSelfClosingTag() return true case a.Slot: p.addElement() if p.hasSelfClosingToken { p.addLoc() p.oe.pop() p.acknowledgeSelfClosingTag() } return true case a.Noscript: p.addElement() p.im = inHeadNoscriptIM // Don't let the tokenizer go into raw text mode when scripting is disabled. 
p.tokenizer.NextIsNotRawText() return true case a.Script, a.Title: p.addElement() if p.originalIM == nil { p.setOriginalIM() } p.im = textIM if p.hasSelfClosingToken { p.addLoc() p.oe.pop() p.acknowledgeSelfClosingTag() } return true case a.Noframes, a.Style: p.parseGenericRawTextElement() if p.hasSelfClosingToken { p.addLoc() p.oe.pop() p.acknowledgeSelfClosingTag() } return true case a.Head: // Ignore the token, but copy attributes and remove implicit marker // if an explicit tag is encountered (fixes issue #1124) if p.head != nil { copyAttributes(p.head, p.tok) removeImplicitMarker(p.head) } return true case a.Template: // TODO: remove this divergence from the HTML5 spec. // // We don't handle all of the corner cases when mixing foreign // content (i.e. or ) with tag. case a.Template: return inHeadIM(p) default: // Ignore the token. p.addLoc() return true } case CommentToken: p.addChild(&Node{ Type: CommentNode, Data: p.tok.Data, Loc: p.generateLoc(), }) return true case DoctypeToken: // Ignore the token. return true case StartExpressionToken: p.addExpression() p.afe = append(p.afe, &scopeMarker) return true case EndExpressionToken: p.addLoc() p.oe.pop() // Ignore the token. return true } p.parseImpliedToken(StartTagToken, a.Body, a.Body.String()) p.framesetOK = true return false } // copyAttributes copies attributes of src not found on dst to dst. func copyAttributes(dst *Node, src Token) { if len(src.Attr) == 0 { return } attr := map[string]string{} for _, t := range dst.Attr { attr[t.Key] = t.Val } for _, t := range src.Attr { if _, ok := attr[t.Key]; !ok { dst.Attr = append(dst.Attr, t) attr[t.Key] = t.Val } } } // removeImplicitMarker removes the ImplicitNodeMarker attribute from a node. // This is used when an explicit tag is encountered for a node that was // previously created implicitly. func removeImplicitMarker(n *Node) { for i, attr := range n.Attr { if attr.Key == ImplicitNodeMarker { n.Attr = append(n.Attr[:i], n.Attr[i+1:]...) 
return } } } // Section 12.2.6.4.7. func inBodyIM(p *parser) bool { switch p.tok.Type { case FrontmatterFenceToken: // If originalIM is already set, we have a closing fence without an opening one if p.originalIM != nil { p.handler.AppendError(&loc.ErrorWithRange{ Code: loc.ERROR_MISSING_FRONTMATTER_FENCE, Text: "The closing frontmatter fence (---) is missing an opening fence", Hint: "Add --- at the beginning of your file before any import statements or code", Range: loc.Range{ Loc: p.tok.Loc, Len: 3, }, }) return true } p.setOriginalIM() p.im = frontmatterIM return false case TextToken: d := p.tok.Data switch n := p.top(); n.DataAtom { case a.Pre, a.Listing: if n.FirstChild == nil { // Ignore a newline at the start of a
 block.
				if d != "" && d[0] == '\r' {
					d = d[1:]
				}
				if d != "" && d[0] == '\n' {
					d = d[1:]
				}
			}
		}
		d = strings.Replace(d, "\x00", "", -1)
		if d == "" {
			return true
		}
		p.reconstructActiveFormattingElements()
		p.addText(d)
		if p.framesetOK && strings.TrimLeft(d, whitespace) != "" {
			// There were non-whitespace characters inserted.
			p.framesetOK = false
		}
	case StartTagToken:
		// if literal and we only have html and body open
		if p.literal {
			p.im = inLiteralIM
			p.originalIM = inBodyIM
			return false
		}
		// Handle  tag first before the originalIM check, so that
		// the implicit  is not converted to  when an explicit
		//  tag is encountered (fixes issue #1124)
		if p.tok.DataAtom == a.Html {
			if p.inTemplateFragmentContext() {
				p.addElement()
				return true
			}
			if p.oe.contains(a.Template) {
				return true
			}
			if len(p.oe) > 0 {
				copyAttributes(p.oe[0], p.tok)
				// Remove ImplicitNodeMarker if an explicit  tag is encountered
				// This ensures that when a JSX comment appears between DOCTYPE and ,
				// the explicit  tag is properly rendered (fixes issue #1124)
				removeImplicitMarker(p.oe[0])
			}
			// Clear originalIM so that the subsequent  tag is not converted to 
			p.originalIM = nil
			return true
		}
		// It's possible we were moved here from inHeadIM
		// via the children of a Component. We need to clear the originalIM
		// and switch the implicit `head` tag to `body`
		if p.originalIM != nil {
			i := p.indexOfElementInScope(defaultScope, a.Head)
			if i != -1 {
				p.oe[i].Data = "body"
				p.oe[i].DataAtom = a.Body
				p.originalIM = nil
			}
		}
		switch p.tok.DataAtom {
		case a.Html:
			// Already handled above
		case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title:
			return inHeadIM(p)
		case a.Body:
			if p.inTemplateFragmentContext() {
				p.addElement()
				return true
			}
			if p.oe.contains(a.Template) {
				return true
			}
			if len(p.oe) > 1 {
				body := p.oe[1]
				if body.Type == ElementNode && body.DataAtom == a.Body {
					p.framesetOK = false
					copyAttributes(body, p.tok)
				}
			}
		case a.Frameset:
			if !p.framesetOK || len(p.oe) < 2 || p.oe[1].DataAtom != a.Body {
				// Ignore the token.
				return true
			}
			body := p.oe[1]
			if body.Parent != nil {
				body.Parent.RemoveChild(body)
			}
			p.oe = p.oe[:1]
			p.addElement()
			p.im = inFramesetIM
			return true
		case a.Address, a.Article, a.Aside, a.Blockquote, a.Center, a.Details, a.Dialog, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Main, a.Menu, a.Nav, a.Ol, a.P, a.Section, a.Summary, a.Ul:
			p.popUntil(buttonScope, a.P)
			p.addElement()
		case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
			p.popUntil(buttonScope, a.P)
			switch n := p.top(); n.DataAtom {
			case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
				p.oe.pop()
			}
			p.addElement()
		case a.Pre, a.Listing:
			p.popUntil(buttonScope, a.P)
			p.addElement()
			// The newline, if any, will be dealt with by the TextToken case.
			p.framesetOK = false
		case a.Form:
			if p.form != nil && !p.oe.contains(a.Template) {
				// Ignore the token
				return true
			}
			p.popUntil(buttonScope, a.P)
			p.addElement()
			if !p.oe.contains(a.Template) {
				p.form = p.top()
			}
		case a.Li:
			p.framesetOK = false
			for i := len(p.oe) - 1; i >= 0; i-- {
				node := p.oe[i]
				switch node.DataAtom {
				case a.Li:
					p.oe = p.oe[:i]
				case a.Address, a.Div, a.P:
					continue
				default:
					if !isSpecialElement(node) {
						continue
					}
				}
				break
			}
			p.popUntil(buttonScope, a.P)
			p.addElement()
		case a.Dd, a.Dt:
			p.framesetOK = false
			for i := len(p.oe) - 1; i >= 0; i-- {
				node := p.oe[i]
				switch node.DataAtom {
				case a.Dd, a.Dt:
					p.oe = p.oe[:i]
				case a.Address, a.Div, a.P:
					continue
				default:
					if !isSpecialElement(node) {
						continue
					}
				}
				break
			}
			p.popUntil(buttonScope, a.P)
			p.addElement()
		case a.Plaintext:
			p.popUntil(buttonScope, a.P)
			p.addElement()
		case a.Button:
			p.popUntil(defaultScope, a.Button)
			p.reconstructActiveFormattingElements()
			p.addElement()
			p.framesetOK = false
		case a.A:
			for i := len(p.afe) - 1; i >= 0 && p.afe[i].Type != scopeMarkerNode; i-- {
				if n := p.afe[i]; n.Type == ElementNode && n.DataAtom == a.A {
					p.inBodyEndTagFormatting(a.A, "a")
					p.oe.remove(n)
					p.afe.remove(n)
					break
				}
			}
			p.reconstructActiveFormattingElements()
			p.addFormattingElement()
			if p.hasSelfClosingToken {
				p.afe.pop()
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.B, a.Big, a.Code, a.Em, a.Font, a.I, a.S, a.Small, a.Strike, a.Strong, a.Tt, a.U:
			p.reconstructActiveFormattingElements()
			p.addFormattingElement()
			if p.hasSelfClosingToken {
				p.afe.pop()
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.Nobr:
			p.reconstructActiveFormattingElements()
			if p.elementInScope(defaultScope, a.Nobr) {
				p.inBodyEndTagFormatting(a.Nobr, "nobr")
				p.reconstructActiveFormattingElements()
			}
			p.addFormattingElement()
			if p.hasSelfClosingToken {
				p.afe.pop()
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.Applet, a.Marquee, a.Object:
			p.reconstructActiveFormattingElements()
			p.addElement()
			p.afe = append(p.afe, &scopeMarker)
			p.framesetOK = false
		case a.Table:
			if !p.quirks {
				p.popUntil(buttonScope, a.P)
			}
			p.addElement()
			p.framesetOK = false
			p.im = inTableIM
			return true
		case a.Area, a.Br, a.Embed, a.Img, a.Input, a.Keygen, a.Wbr:
			p.reconstructActiveFormattingElements()
			p.addElement()
			p.oe.pop()
			p.acknowledgeSelfClosingTag()
			if p.tok.DataAtom == a.Input {
				for _, t := range p.tok.Attr {
					if t.Key == "type" {
						if strings.ToLower(t.Val) == "hidden" {
							// Skip setting framesetOK = false
							return true
						}
					}
				}
			}
			p.framesetOK = false
		case a.Param, a.Source, a.Track:
			p.addElement()
			p.oe.pop()
			p.acknowledgeSelfClosingTag()
		case a.Hr:
			p.popUntil(buttonScope, a.P)
			p.addElement()
			p.oe.pop()
			p.acknowledgeSelfClosingTag()
			p.framesetOK = false
		case a.Image:
			p.tok.DataAtom = a.Img
			p.tok.Data = a.Img.String()
			return false
		case a.Textarea:
			p.addElement()
			p.originalIM = inBodyIM
			p.framesetOK = false
			p.im = textIM
		case a.Xmp:
			p.popUntil(buttonScope, a.P)
			p.reconstructActiveFormattingElements()
			p.framesetOK = false
			p.parseGenericRawTextElement()
		case a.Iframe:
			p.framesetOK = false
			p.parseGenericRawTextElement()
		case a.Noembed:
			p.parseGenericRawTextElement()
		case a.Noscript:
			if p.scripting {
				p.parseGenericRawTextElement()
				return true
			}
			p.reconstructActiveFormattingElements()
			p.addElement()
			// Don't let the tokenizer go into raw text mode when scripting is disabled.
			p.tokenizer.NextIsNotRawText()
		case a.Select:
			p.reconstructActiveFormattingElements()
			p.addElement()
			p.framesetOK = false
			p.im = inSelectIM
			return true
		case a.Optgroup, a.Option:
			if p.top().DataAtom == a.Option {
				p.oe.pop()
			}
			p.reconstructActiveFormattingElements()
			p.addElement()
			if p.hasSelfClosingToken {
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.Rb, a.Rtc:
			if p.elementInScope(defaultScope, a.Ruby) {
				p.generateImpliedEndTags()
			}
			p.addElement()
			if p.hasSelfClosingToken {
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.Rp, a.Rt:
			if p.elementInScope(defaultScope, a.Ruby) {
				p.generateImpliedEndTags("rtc")
			}
			p.addElement()
			if p.hasSelfClosingToken {
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		case a.Math, a.Svg:
			p.reconstructActiveFormattingElements()
			if p.tok.DataAtom == a.Math {
				adjustAttributeNames(p.tok.Attr, mathMLAttributeAdjustments)
			} else {
				adjustAttributeNames(p.tok.Attr, svgAttributeAdjustments)
			}
			adjustForeignAttributes(p.tok.Attr)
			p.addElement()
			p.top().Namespace = p.tok.Data
			if p.hasSelfClosingToken {
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
			return true
		case a.Head:
			if p.inTemplateFragmentContext() {
				p.addElement()
				p.im = inHeadIM
				return true
			}
		default:
			// Special handling for selectedcontent as a void element
			if p.tok.Data == "selectedcontent" {
				p.reconstructActiveFormattingElements()
				p.addElement()
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
				return true
			}
			p.reconstructActiveFormattingElements()
			p.addElement()
			if p.hasSelfClosingToken {
				p.oe.pop()
				p.acknowledgeSelfClosingTag()
			}
		}
		if p.hasSelfClosingToken {
			p.oe.pop()
			p.acknowledgeSelfClosingTag()
		}
	case EndTagToken:
		if isComponent(p.tok.Data) {
			p.addLoc()
			p.oe.pop()
			return true
		}

		// Special handling for selectedcontent end tag - just ignore it
		// since it's treated as a void element
		if p.tok.Data == "selectedcontent" {
			return true
		}

		switch p.tok.DataAtom {
		case a.Body:
			p.addLoc()
			if p.elementInScope(defaultScope, a.Body) {
				p.im = afterBodyIM
			}
			if p.literal {
				p.oe.pop()
			}
		case a.Html:
			p.addLoc()
			if p.elementInScope(defaultScope, a.Body) {
				p.parseImpliedToken(EndTagToken, a.Body, a.Body.String())
				return false
			}
			if p.literal {
				p.oe.pop()
			}
			return true
		case a.Address, a.Article, a.Aside, a.Blockquote, a.Button, a.Center, a.Details, a.Dialog, a.Dir, a.Div, a.Dl, a.Fieldset, a.Figcaption, a.Figure, a.Footer, a.Header, a.Hgroup, a.Listing, a.Main, a.Menu, a.Nav, a.Ol, a.Pre, a.Section, a.Summary, a.Ul:
			p.addLoc()
			p.popUntil(defaultScope, p.tok.DataAtom)
		case a.Form:
			if p.oe.contains(a.Template) {
				i := p.indexOfElementInScope(defaultScope, a.Form)
				if i == -1 {
					// Ignore the token.
					return true
				}
				p.generateImpliedEndTags()
				if p.oe[i].DataAtom != a.Form {
					// Ignore the token.
					return true
				}
				p.addLoc()
				p.popUntil(defaultScope, a.Form)
			} else {
				node := p.form
				p.form = nil
				i := p.indexOfElementInScope(defaultScope, a.Form)
				if node == nil || i == -1 || p.oe[i] != node {
					// Ignore the token.
					return true
				}
				p.generateImpliedEndTags()
				p.oe.remove(node)
			}
		case a.P:
			if !p.elementInScope(buttonScope, a.P) {
				p.parseImpliedToken(StartTagToken, a.P, a.P.String())
			}
			p.addLoc()
			p.popUntil(buttonScope, a.P)
		case a.Li:
			p.addLoc()
			p.popUntil(listItemScope, a.Li)
		case a.Dd, a.Dt:
			p.addLoc()
			p.popUntil(defaultScope, p.tok.DataAtom)
		case a.H1, a.H2, a.H3, a.H4, a.H5, a.H6:
			p.addLoc()
			p.popUntil(defaultScope, a.H1, a.H2, a.H3, a.H4, a.H5, a.H6)
		case a.A, a.B, a.Big, a.Code, a.Em, a.Font, a.I, a.Nobr, a.S, a.Small, a.Strike, a.Strong, a.Tt, a.U:
			p.addLoc()
			p.inBodyEndTagFormatting(p.tok.DataAtom, p.tok.Data)
		case a.Applet, a.Marquee, a.Object:
			p.addLoc()
			if p.popUntil(defaultScope, p.tok.DataAtom) {
				p.clearActiveFormattingElements()
			}
		case a.Br:
			p.tok.Type = StartTagToken
			return false
		case a.Base, a.Basefont, a.Bgsound, a.Link, a.Meta, a.Noframes, a.Script, a.Style, a.Template, a.Title:
			return inHeadIM(p)
		default:
			p.inBodyEndTagOther(p.tok.DataAtom, p.tok.Data)
		}
	case CommentToken:
		p.addChild(&Node{
			Type: CommentNode,
			Data: p.tok.Data,
			Loc:  p.generateLoc(),
		})
	case StartExpressionToken:
		p.addExpression()
		p.afe = append(p.afe, &scopeMarker)
		p.originalIM = inBodyIM
		p.im = inExpressionIM
		return true
	case EndExpressionToken:
		p.addLoc()
		p.oe.pop()
		return true
	case ErrorToken:
		// TODO: remove this divergence from the HTML5 spec.
		if len(p.templateStack) > 0 {
			p.im = inTemplateIM
			return false
		}
		for _, e := range p.oe {
			switch e.DataAtom {
			case a.Dd, a.Dt, a.Li, a.Optgroup, a.Option, a.P, a.Rb, a.Rp, a.Rt, a.Rtc, a.Tbody, a.Td, a.Tfoot, a.Th,
				a.Thead, a.Tr, a.Body, a.Html:
			default:
				return true
			}
		}
	}
	if p.frontmatterState == FrontmatterInitial {
		p.addFrontmatter(true)
	}
	return true
}

// inTemplateFragmentContext reports whether the parser is running in
// fragment-parsing mode with a <template> context element while the stack
// of open elements still contains only its root node.
func (p *parser) inTemplateFragmentContext() bool {
	if p.context == nil {
		return false
	}
	return p.context.DataAtom == a.Template && len(p.oe) == 1
}

// inBodyEndTagFormatting handles an end tag for a formatting element
// (<a>, <b>, <i>, <nobr>, …) while in the "in body" insertion mode.
// tagAtom is the tag's interned atom (zero for custom tags) and tagName
// its literal name; p.oe is the stack of open elements and p.afe the
// list of active formatting elements.
func (p *parser) inBodyEndTagFormatting(tagAtom a.Atom, tagName string) {
	// This is the "adoption agency" algorithm, described at
	// https://html.spec.whatwg.org/multipage/syntax.html#adoptionAgency

	// TODO: this is a fairly literal line-by-line translation of that algorithm.
	// Once the code successfully parses the comprehensive test suite, we should
	// refactor this code to be more idiomatic.

	// Steps 1-2: if the current node matches by name but is not an active
	// formatting element, simply pop it and stop.
	if current := p.oe.top(); current.Data == tagName && p.afe.index(current) == -1 {
		p.oe.pop()
		return
	}

	// Steps 3-5. The outer loop. The spec caps it at 8 iterations.
	for i := 0; i < 8; i++ {
		// Step 6. Find the formatting element: the most recent entry in the
		// active formatting elements list (up to the nearest scope marker)
		// whose atom matches the end tag.
		var formattingElement *Node
		for j := len(p.afe) - 1; j >= 0; j-- {
			if p.afe[j].Type == scopeMarkerNode {
				break
			}
			if p.afe[j].DataAtom == tagAtom {
				formattingElement = p.afe[j]
				break
			}
		}
		if formattingElement == nil {
			// No matching formatting element: fall back to the
			// "any other end tag" algorithm.
			p.inBodyEndTagOther(tagAtom, tagName)
			return
		}

		// Step 7. Ignore the tag if formatting element is not in the stack of open elements.
		feIndex := p.oe.index(formattingElement)
		if feIndex == -1 {
			// Stale entry: drop it from the active formatting list.
			p.afe.remove(formattingElement)
			return
		}
		// Step 8. Ignore the tag if formatting element is not in the scope.
		if !p.elementInScope(defaultScope, tagAtom) {
			// Ignore the tag.
			return
		}

		// Step 9. This step is omitted because it's just a parse error but no need to return.

		// Steps 10-11. Find the furthest block: the topmost "special"
		// element above the formatting element on the open-element stack.
		var furthestBlock *Node
		for _, e := range p.oe[feIndex:] {
			if isSpecialElement(e) {
				furthestBlock = e
				break
			}
		}
		if furthestBlock == nil {
			// Simple case: pop everything up to and including the
			// formatting element, and remove it from the active list.
			e := p.oe.pop()
			for e != formattingElement {
				e = p.oe.pop()
			}
			p.afe.remove(e)
			return
		}

		// Steps 12-13. Find the common ancestor and bookmark node.
		// The bookmark records where the clone will be re-inserted into
		// the active formatting elements list in step 19.
		commonAncestor := p.oe[feIndex-1]
		bookmark := p.afe.index(formattingElement)

		// Step 14. The inner loop. Find the lastNode to reparent.
		lastNode := furthestBlock
		node := furthestBlock
		x := p.oe.index(node)
		// Step 14.1. j is the inner loop counter.
		j := 0
		for {
			// Step 14.2.
			j++
			// Step. 14.3. Walk down the open-element stack toward the
			// formatting element.
			x--
			node = p.oe[x]
			// Step 14.4. Go to the next step if node is formatting element.
			if node == formattingElement {
				break
			}
			// Step 14.5. Remove node from the list of active formatting elements if
			// inner loop counter is greater than three and node is in the list of
			// active formatting elements.
			if ni := p.afe.index(node); j > 3 && ni > -1 {
				p.afe.remove(node)
				// If any element of the list of active formatting elements is removed,
				// we need to take care whether bookmark should be decremented or not.
				// This is because the value of bookmark may exceed the size of the
				// list by removing elements from the list.
				if ni <= bookmark {
					bookmark--
				}
				continue
			}
			// Step 14.6. Continue the next inner loop if node is not in the list of
			// active formatting elements.
			if p.afe.index(node) == -1 {
				p.oe.remove(node)
				continue
			}
			// Step 14.7. Clone the node and replace it in both lists so the
			// original can be reparented without aliasing issues.
			clone := node.clone()
			p.afe[p.afe.index(node)] = clone
			p.oe[p.oe.index(node)] = clone
			node = clone
			// Step 14.8.
			if lastNode == furthestBlock {
				bookmark = p.afe.index(node) + 1
			}
			// Step 14.9. Detach lastNode and hang it under the clone.
			if lastNode.Parent != nil {
				lastNode.Parent.RemoveChild(lastNode)
			}
			node.AppendChild(lastNode)
			// Step 14.10.
			lastNode = node
		}

		// Step 15. Reparent lastNode to the common ancestor,
		// or for misnested table nodes, to the foster parent.
		if lastNode.Parent != nil {
			lastNode.Parent.RemoveChild(lastNode)
		}
		switch commonAncestor.DataAtom {
		case a.Table, a.Tbody, a.Tfoot, a.Thead, a.Tr:
			p.fosterParent(lastNode)
		default:
			commonAncestor.AppendChild(lastNode)
		}

		// Steps 16-18. Reparent nodes from the furthest block's children
		// to a clone of the formatting element.
		clone := formattingElement.clone()
		reparentChildren(clone, furthestBlock)
		furthestBlock.AppendChild(clone)

		// Step 19. Fix up the list of active formatting elements.
		if oldLoc := p.afe.index(formattingElement); oldLoc != -1 && oldLoc < bookmark {
			// Move the bookmark with the rest of the list.
			bookmark--
		}
		p.afe.remove(formattingElement)
		p.afe.insert(bookmark, clone)

		// Step 20. Fix up the stack of open elements.
		p.oe.remove(formattingElement)
		p.oe.insert(p.oe.index(furthestBlock)+1, clone)
	}
}

// inBodyEndTagOther performs the "any other end tag" algorithm for inBodyIM.
// "Any other end tag" handling from 12.2.6.5 The rules for parsing tokens in foreign content
// https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inforeign
// inBodyEndTagOther performs the "any other end tag" algorithm for inBodyIM.
// "Any other end tag" handling from 12.2.6.5 The rules for parsing tokens in foreign content
// https://html.spec.whatwg.org/multipage/syntax.html#parsing-main-inforeign
func (p *parser) inBodyEndTagOther(tagAtom a.Atom, tagName string) {
	// Walk the stack of open elements from the top down, looking for a node
	// with the same tag as the end token.
	for i := len(p.oe) - 1; i >= 0; i-- {
		node := p.oe[i]
		// Two element nodes have the same tag if they have the same Data (a
		// string-typed field). As an optimization, for common HTML tags, each
		// Data string is assigned a unique, non-zero DataAtom (a uint32-typed
		// field), since integer comparison is faster than string comparison.
		// Uncommon (custom) tags get a zero DataAtom.
		//
		// The condition below is equivalent to (node.Data == tagName).
		sameTag := node.DataAtom == tagAtom &&
			(tagAtom != 0 || node.Data == tagName)
		if sameTag {
			p.addLoc()
			// If we only have a single element, just ignore it
			if len(p.oe) > 1 {
				p.oe = p.oe[:i]
			}
			break
		}
		if isSpecialElement(node) {
			p.addLoc()
			break
		}
	}
}

// Section 12.2.6.4.8.
func textIM(p *parser) bool {
	switch p.tok.Type {
	case ErrorToken:
		if p.inTemplateFragmentContext() {
			return inBodyIM(p)
		}
	case TextToken:
		d := p.tok.Data
		if n := p.oe.top(); n != nil && n.DataAtom == a.Textarea && n.FirstChild == nil {
			// Ignore a newline at the start of a 
```

## Output

```js
import {
  Fragment,
  render as $$render,
  createAstro as $$createAstro,
  createComponent as $$createComponent,
  renderComponent as $$renderComponent,
  renderHead as $$renderHead,
  maybeRenderHead as $$maybeRenderHead,
  unescapeHTML as $$unescapeHTML,
  renderSlot as $$renderSlot,
  mergeSlots as $$mergeSlots,
  addAttribute as $$addAttribute,
  spreadAttributes as $$spreadAttributes,
  defineStyleVars as $$defineStyleVars,
  defineScriptVars as $$defineScriptVars,
  renderTransition as $$renderTransition,
  createTransitionScope as $$createTransitionScope,
  renderScript as $$renderScript,
  createMetadata as $$createMetadata
} from "http://localhost:3000/";

export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] });

const $$Component = $$createComponent(($$result, $$props, $$slots) => {

const value = 'test';

return $$render`${$$maybeRenderHead($$result)}`;
}, undefined, undefined);
export default $$Component;
```
---


================================================
FILE: internal/printer/__printer_js__/textarea_in_form.snap
================================================

[TestPrinter/textarea_in_form - 1]
## Input

```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$renderComponent($$result,'Component',Component,{},{"default": () => $$render`${$$maybeRenderHead($$result)}
`,})}`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/textarea_inside_expression.snap ================================================ [TestPrinter/textarea_inside_expression - 1] ## Input ``` {bool && } {!bool && } ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${bool && $$render`${$$maybeRenderHead($$result)}`} ${!bool && $$render``}`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/th_expressions.snap ================================================ [TestPrinter/th_expressions - 1] ## Input ```
{title}
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}
${title}
`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/tr_only.snap ================================================ [TestPrinter/tr_only - 1] ## Input ``` col 1col 2{foo} ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}col 1col 2${foo}`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/trailing_expression.snap ================================================ [TestPrinter/trailing_expression - 1] ## Input ``` {} ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, 
renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$renderComponent($$result,'Component',Component,{})}${(void 0)}`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_animate_on_Component.snap ================================================ [TestPrinter/transition:animate_on_Component - 1] ## Input ``` ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata("/projects/app/src/pages/page.astro", { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Page = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$renderComponent($$result,'Component',Component,{"class":"bar","data-astro-transition-scope":($$renderTransition($$result, "wkm5vset", "morph", ""))})}`; }, '/projects/app/src/pages/page.astro', 'self'); export 
default $$Page; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_animate_with_an_expression.snap ================================================ [TestPrinter/transition:animate_with_an_expression - 1] ## Input ```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata("/projects/app/src/pages/page.astro", { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Page = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}
`; }, '/projects/app/src/pages/page.astro', 'self'); export default $$Page; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_name_with_an_expression.snap ================================================ [TestPrinter/transition:name_with_an_expression - 1] ## Input ```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata("/projects/app/src/pages/page.astro", { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Page = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}`; }, '/projects/app/src/pages/page.astro', 'self'); export default $$Page; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_name_with_an_template_literal.snap ================================================ [TestPrinter/transition:name_with_an_template_literal - 1] ## Input ```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata("/projects/app/src/pages/page.astro", { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Page = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}`; }, '/projects/app/src/pages/page.astro', 'self'); export default $$Page; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_persist-props_converted_to_a_data_attribute.snap ================================================ [TestPrinter/transition:persist-props_converted_to_a_data_attribute - 1] ## Input ``` ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from 
"http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$renderComponent($$result,'my-island','my-island',{"data-astro-transition-persist-props":"false","data-astro-transition-persist":($$createTransitionScope($$result, "otghnj5u"))})}`; }, undefined, 'self'); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_persist_converted_to_a_data_attribute.snap ================================================ [TestPrinter/transition:persist_converted_to_a_data_attribute - 1] ## Input ```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}`; }, undefined, 'self'); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/transition_persist_uses_transition_name_if_defined.snap ================================================ [TestPrinter/transition:persist_uses_transition:name_if_defined - 1] ## Input ```
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import "transitions.css"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}
`; }, undefined, 'self'); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/type_import.snap ================================================ [TestPrinter/type_import - 1] ## Input ``` /-/-/-/ import type data from "test" /-/-/-/
{data}
``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; import type data from "test" export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`${$$maybeRenderHead($$result)}
${data}
`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/unusual_line_terminator_I.snap ================================================ [TestPrinter/unusual_line_terminator_I - 1] ## Input ``` Pre-set & Time-limited \u2028holiday campaigns ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`Pre-set & Time-limited \\u2028holiday campaigns`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/unusual_line_terminator_II.snap ================================================ [TestPrinter/unusual_line_terminator_II - 1] ## Input ``` Pre-set & Time-limited 
holiday campaigns ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render`Pre-set & Time-limited 
holiday campaigns`; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_js__/user-defined__implicit__is_printed.snap ================================================ [TestPrinter/user-defined_`implicit`_is_printed - 1] ## Input ``` ``` ## Output ```js import { Fragment, render as $$render, createAstro as $$createAstro, createComponent as $$createComponent, renderComponent as $$renderComponent, renderHead as $$renderHead, maybeRenderHead as $$maybeRenderHead, unescapeHTML as $$unescapeHTML, renderSlot as $$renderSlot, mergeSlots as $$mergeSlots, addAttribute as $$addAttribute, spreadAttributes as $$spreadAttributes, defineStyleVars as $$defineStyleVars, defineScriptVars as $$defineScriptVars, renderTransition as $$renderTransition, createTransitionScope as $$createTransitionScope, renderScript as $$renderScript, createMetadata as $$createMetadata } from "http://localhost:3000/"; export const $$metadata = $$createMetadata(import.meta.url, { modules: [], hydratedComponents: [], clientOnlyComponents: [], hydrationDirectives: new Set([]), hoisted: [] }); const $$Component = $$createComponent(($$result, $$props, $$slots) => { return $$render``; }, undefined, undefined); export default $$Component; ``` --- ================================================ FILE: internal/printer/__printer_json__/Comment.snap ================================================ [TestPrintToJSON/Comment - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"comment","value":"hello"}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Comment_preserves_whitespace.snap ================================================ [TestPrintToJSON/Comment_preserves_whitespace - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"comment","value":" hello "}]} ``` --- ================================================ FILE: 
internal/printer/__printer_json__/Component.snap ================================================ [TestPrintToJSON/Component - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"component","name":"Component","attributes":[],"children":[]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Doctype.snap ================================================ [TestPrintToJSON/Doctype - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"doctype","value":"html"}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Fragment_Literal.snap ================================================ [TestPrintToJSON/Fragment_Literal - 1] ## Input ``` World ``` ## Output ```json {"type":"root","children":[{"type":"fragment","name":"Fragment","attributes":[],"children":[{"type":"text","value":"World"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Fragment_Shorthand.snap ================================================ [TestPrintToJSON/Fragment_Shorthand - 1] ## Input ``` <>Hello ``` ## Output ```json {"type":"root","children":[{"type":"fragment","name":"","attributes":[],"children":[{"type":"text","value":"Hello"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Frontmatter.snap ================================================ [TestPrintToJSON/Frontmatter - 1] ## Input ``` /-/-/-/ const a = "hey" /-/-/-/
{a}
``` ## Output ```json {"type":"root","children":[{"type":"frontmatter","value":"\nconst a = \"hey\"\n"},{"type":"element","name":"div","attributes":[],"children":[{"type":"expression","children":[{"type":"text","value":"a"}]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/JSON_escape.snap ================================================ [TestPrintToJSON/JSON_escape - 1] ## Input ``` /-/-/-/ const a = "\n" const b = "\"" const c = '\'' /-/-/-/ {a + b + c} ``` ## Output ```json {"type":"root","children":[{"type":"frontmatter","value":"\nconst a = \"\\n\"\nconst b = \"\\\"\"\nconst c = '\\''\n"},{"type":"expression","children":[{"type":"text","value":"a + b + c"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/Preserve_namespaces.snap ================================================ [TestPrintToJSON/Preserve_namespaces - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"element","name":"svg","attributes":[{"type":"attribute","kind":"quoted","name":"xmlns","value":"http://www.w3.org/2000/svg","raw":"\"http://www.w3.org/2000/svg\""},{"type":"attribute","kind":"quoted","name":"xmlns:xlink","value":"http://www.w3.org/1999/xlink","raw":"\"http://www.w3.org/1999/xlink\""}],"children":[{"type":"element","name":"rect","attributes":[{"type":"attribute","kind":"quoted","name":"xlink:href","value":"#id","raw":"\"#id\""}],"children":[]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/basic.snap ================================================ [TestPrintToJSON/basic - 1] ## Input ```

Hello world!

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello world!"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/custom-element.snap ================================================ [TestPrintToJSON/custom-element - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"custom-element","name":"custom-element","attributes":[],"children":[]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/element_with_unterminated_double_quote_attribute.snap ================================================ [TestPrintToJSON/element_with_unterminated_double_quote_attribute - 1] ## Input ```
``` ## Output ```json {"type":"root","children":[{"type":"element","name":"main","attributes":[{"type":"attribute","kind":"template-literal","name":"id","value":"gotcha","raw":"`gotcha"}],"children":[]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/expression.snap ================================================ [TestPrintToJSON/expression - 1] ## Input ```

Hello {world}

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello "},{"type":"expression","children":[{"type":"text","value":"world"}]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/jsx_comment_between_doctype_and_html.snap ================================================ [TestPrintToJSON/jsx_comment_between_doctype_and_html - 1] ## Input ``` {/* Comment */} ``` ## Output ```json {"type":"root","children":[{"type":"doctype","value":"html"},{"type":"expression","children":[{"type":"text","value":"/* Comment */"}]},{"type":"text","value":"\n"},{"type":"element","name":"html","attributes":[{"type":"attribute","kind":"quoted","name":"lang","value":"en","raw":"\"en\""}],"children":[{"type":"text","value":"\n\t"},{"type":"element","name":"head","attributes":[],"children":[{"type":"text","value":"\n\t\t"},{"type":"element","name":"meta","attributes":[{"type":"attribute","kind":"quoted","name":"charset","value":"UTF-8","raw":"\"UTF-8\""}],"children":[]},{"type":"text","value":"\n\t"}]},{"type":"text","value":"\n"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_after_body_with_component_in_head_and_body.snap ================================================ [TestPrintToJSON/style_after_body_with_component_in_head_and_body - 1] ## Input ```
``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[{"type":"attribute","kind":"quoted","name":"lang","value":"en","raw":"\"en\""}],"children":[{"type":"element","name":"head","attributes":[],"children":[{"type":"component","name":"BaseHead","attributes":[],"children":[]}]},{"type":"element","name":"body","attributes":[],"children":[{"type":"component","name":"Header","attributes":[],"children":[]}]},{"type":"element","name":"style","attributes":[],"children":[{"type":"text","value":"@use \"../styles/global.scss\";"}]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_after_empty_html.snap ================================================ [TestPrintToJSON/style_after_empty_html - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[],"children":[]},{"type":"element","name":"style","attributes":[],"children":[]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_after_html.snap ================================================ [TestPrintToJSON/style_after_html - 1] ## Input ```

Hello world!

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[],"children":[{"type":"element","name":"body","attributes":[],"children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello world!"}]}]}]},{"type":"element","name":"style","attributes":[],"children":[]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_after_html_with_component_in_head.snap ================================================ [TestPrintToJSON/style_after_html_with_component_in_head - 1] ## Input ``` ``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[{"type":"attribute","kind":"quoted","name":"lang","value":"en","raw":"\"en\""}],"children":[{"type":"element","name":"head","attributes":[],"children":[{"type":"component","name":"BaseHead","attributes":[],"children":[]}]}]},{"type":"element","name":"style","attributes":[],"children":[{"type":"text","value":"@use \"../styles/global.scss\";"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_after_html_with_component_in_head_and_body.snap ================================================ [TestPrintToJSON/style_after_html_with_component_in_head_and_body - 1] ## Input ```
``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[{"type":"attribute","kind":"quoted","name":"lang","value":"en","raw":"\"en\""}],"children":[{"type":"element","name":"head","attributes":[],"children":[{"type":"component","name":"BaseHead","attributes":[],"children":[]}]},{"type":"element","name":"body","attributes":[],"children":[{"type":"component","name":"Header","attributes":[],"children":[]}]}]},{"type":"element","name":"style","attributes":[],"children":[{"type":"text","value":"@use \"../styles/global.scss\";"}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_before_html.snap ================================================ [TestPrintToJSON/style_before_html - 1] ## Input ```

Hello world!

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"style","attributes":[],"children":[]},{"type":"element","name":"html","attributes":[],"children":[{"type":"element","name":"body","attributes":[],"children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello world!"}]}]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_in_body.snap ================================================ [TestPrintToJSON/style_in_body - 1] ## Input ```

Hello world!

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[],"children":[{"type":"element","name":"body","attributes":[],"children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello world!"}]},{"type":"element","name":"style","attributes":[],"children":[]}]}]}]} ``` --- ================================================ FILE: internal/printer/__printer_json__/style_in_html.snap ================================================ [TestPrintToJSON/style_in_html - 1] ## Input ```

Hello world!

``` ## Output ```json {"type":"root","children":[{"type":"element","name":"html","attributes":[],"children":[{"type":"element","name":"body","attributes":[],"children":[{"type":"element","name":"h1","attributes":[],"children":[{"type":"text","value":"Hello world!"}]}]},{"type":"element","name":"style","attributes":[],"children":[]}]}]} ``` --- ================================================ FILE: internal/printer/print-css.go ================================================ package printer import ( "strings" . "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/sourcemap" "github.com/withastro/compiler/internal/transform" ) type PrintCSSResult struct { Output [][]byte SourceMapChunk sourcemap.Chunk } func PrintCSS(sourcetext string, doc *Node, opts transform.TransformOptions) PrintCSSResult { p := &printer{ opts: opts, builder: sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(sourcetext, len(strings.Split(sourcetext, "\n")))), } result := PrintCSSResult{ SourceMapChunk: p.builder.GenerateChunk(p.output), } if len(doc.Styles) > 0 { for _, style := range doc.Styles { if style.FirstChild != nil && strings.TrimSpace(style.FirstChild.Data) != "" { p.addSourceMapping(style.Loc[0]) p.print(strings.TrimSpace(style.FirstChild.Data)) result.Output = append(result.Output, p.output) p.output = []byte{} p.addNilSourceMapping() } } } return result } ================================================ FILE: internal/printer/print-to-js.go ================================================ // Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package printer import ( "bytes" "fmt" "sort" "strings" "unicode" . 
"github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/helpers" "github.com/withastro/compiler/internal/js_scanner" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/sourcemap" "github.com/withastro/compiler/internal/transform" "golang.org/x/net/html/atom" ) // Render renders the parse tree n to the given writer. // // Rendering is done on a 'best effort' basis: calling Parse on the output of // Render will always result in something similar to the original tree, but it // is not necessarily an exact clone unless the original tree was 'well-formed'. // 'Well-formed' is not easily specified; the HTML5 specification is // complicated. // // Calling Parse on arbitrary input typically results in a 'well-formed' parse // tree. However, it is possible for Parse to yield a 'badly-formed' parse tree. // For example, in a 'well-formed' parse tree, no element is a child of // another element: parsing "" results in two sibling elements. // Similarly, in a 'well-formed' parse tree, no element is a child of a // element: parsing "

" results in a

with two sibling // children; the is reparented to the

's parent. However, calling // Parse on "
" does not return an error, but the result has an // element with an child, and is therefore not 'well-formed'. // // Programmatically constructed trees are typically also 'well-formed', but it // is possible to construct a tree that looks innocuous but, when rendered and // re-parsed, results in a different tree. A simple example is that a solitary // text node would become a tree containing , and elements. // Another example is that the programmatic equivalent of "abc" // becomes "abc". func PrintToJS(sourcetext string, n *Node, cssLen int, opts transform.TransformOptions, h *handler.Handler) PrintResult { p := &printer{ sourcetext: sourcetext, opts: opts, builder: sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(sourcetext, len(strings.Split(sourcetext, "\n")))), handler: h, } return printToJs(p, n, cssLen, opts) } type RenderOptions struct { isRoot bool isExpression bool depth int cssLen int opts transform.TransformOptions printedMaybeHead *bool scriptCount *int } type ExtractedStatement struct { Content string Loc loc.Loc } func printToJs(p *printer, n *Node, cssLen int, opts transform.TransformOptions) PrintResult { printedMaybeHead := false scriptCount := 0 render1(p, n, RenderOptions{ cssLen: cssLen, isRoot: true, isExpression: false, depth: 0, opts: opts, printedMaybeHead: &printedMaybeHead, scriptCount: &scriptCount, }) return PrintResult{ Output: p.output, SourceMapChunk: p.builder.GenerateChunk(p.output), } } const whitespace = " \t\r\n\f" // Returns true if the expression only contains a comment block (e.g. 
{/* a comment */}) func expressionOnlyHasComment(n *Node) bool { if n.FirstChild == nil { return false } clean := helpers.RemoveComments(n.FirstChild.Data) trimmedData := strings.TrimLeft(n.FirstChild.Data, whitespace) result := n.FirstChild.NextSibling == nil && n.FirstChild.Type == TextNode && // RemoveComments iterates over text and most of the time we won't be parsing comments so lets check if text starts with /* or // before iterating (strings.HasPrefix(trimmedData, "/*") || strings.HasPrefix(trimmedData, "//")) && len(clean) == 0 return result } func emptyTextNodeWithoutSiblings(n *Node) bool { if strings.TrimSpace(n.Data) != "" { return false } if n.PrevSibling == nil { return n.NextSibling == nil || n.NextSibling.Expression } else { return n.PrevSibling.Expression } } func render1(p *printer, n *Node, opts RenderOptions) { depth := opts.depth if n.Transition { p.needsTransitionCSS = true } // Root of the document, print all children if n.Type == DocumentNode { p.printInternalImports(p.opts.InternalURL, &opts) if n.FirstChild != nil && n.FirstChild.Type != FrontmatterNode { p.printCSSImports(opts.cssLen) } for c := n.FirstChild; c != nil; c = c.NextSibling { render1(p, c, RenderOptions{ isRoot: false, isExpression: false, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } p.printReturnClose() p.printFuncSuffix(opts.opts, n) return } // Decide whether to print code for `Astro` global variable. Use a loose check for now. 
printAstroGlobal := strings.Contains(p.sourcetext, "Astro") // Render frontmatter (will be the first node, if it exists) if n.Type == FrontmatterNode { if n.FirstChild == nil { p.printCSSImports(opts.cssLen) } for c := n.FirstChild; c != nil; c = c.NextSibling { if c.Type == TextNode { p.printInternalImports(p.opts.InternalURL, &opts) start := 0 if len(n.Loc) > 0 { start = c.Loc[0].Start } render := js_scanner.HoistImports([]byte(c.Data)) if len(render.Hoisted) > 0 { for i, hoisted := range render.Hoisted { if len(bytes.TrimSpace(hoisted)) == 0 { continue } hoistedLoc := render.HoistedLocs[i] p.printTextWithSourcemap(string(hoisted)+"\n", loc.Loc{Start: start + hoistedLoc.Start}) } } p.addNilSourceMapping() p.printCSSImports(opts.cssLen) // 1. Component imports, if any exist. p.addNilSourceMapping() p.printComponentMetadata(n.Parent, opts.opts, []byte(p.sourcetext)) // 2. Top-level Astro global. if printAstroGlobal { p.printTopLevelAstro(opts.opts) } exports := make([][]byte, 0) exportLocs := make([]loc.Loc, 0) bodies := make([][]byte, 0) bodiesLocs := make([]loc.Loc, 0) if len(render.Body) > 0 { for i, innerBody := range render.Body { innerStart := render.BodyLocs[i].Start if len(bytes.TrimSpace(innerBody)) == 0 { continue } // Extract exports preprocessed := js_scanner.HoistExports(append(innerBody, '\n')) if len(preprocessed.Hoisted) > 0 { for j, exported := range preprocessed.Hoisted { exportedLoc := preprocessed.HoistedLocs[j] exportLocs = append(exportLocs, loc.Loc{Start: start + innerStart + exportedLoc.Start}) exports = append(exports, exported) } } if len(preprocessed.Body) > 0 { for j, body := range preprocessed.Body { bodyLoc := preprocessed.BodyLocs[j] bodiesLocs = append(bodiesLocs, loc.Loc{Start: start + innerStart + bodyLoc.Start}) bodies = append(bodies, body) } } } } // PRINT EXPORTS if len(exports) > 0 { for i, exported := range exports { exportLoc := exportLocs[i] if len(bytes.TrimSpace(exported)) == 0 { continue } 
p.printTextWithSourcemap(string(exported), exportLoc) p.addNilSourceMapping() p.println("") } } p.printFuncPrelude(opts.opts, printAstroGlobal) // PRINT BODY if len(bodies) > 0 { for i, body := range bodies { bodyLoc := bodiesLocs[i] if len(bytes.TrimSpace(body)) == 0 { continue } p.printTextWithSourcemap(string(body), bodyLoc) } } // Print empty just to ensure a newline p.println("") if len(n.Parent.Styles) > 0 { definedVars := transform.GetDefineVars(n.Parent.Styles) if len(definedVars) > 0 { p.printf("const $$definedVars = %s([%s]);\n", DEFINE_STYLE_VARS, strings.Join(definedVars, ",")) } } p.printReturnOpen() } else { render1(p, c, RenderOptions{ isRoot: false, isExpression: true, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) if len(n.Loc) > 1 { p.addSourceMapping(loc.Loc{Start: n.Loc[1].Start - 3}) } } } return } else if !p.hasFuncPrelude { p.printComponentMetadata(n.Parent, opts.opts, []byte{}) if printAstroGlobal { p.printTopLevelAstro(opts.opts) } // Render func prelude. Will only run for the first non-frontmatter node p.printFuncPrelude(opts.opts, printAstroGlobal) // This just ensures a newline p.println("") // If we haven't printed the funcPrelude but we do have Styles/Scripts, we need to print them! if len(n.Parent.Styles) > 0 { definedVars := transform.GetDefineVars(n.Parent.Styles) if len(definedVars) > 0 { p.printf("const $$definedVars = %s([%s]);\n", DEFINE_STYLE_VARS, strings.Join(definedVars, ",")) } } p.printReturnOpen() } switch n.Type { case TextNode: if strings.TrimSpace(n.Data) == "" { p.addSourceMapping(n.Loc[0]) p.print(n.Data) return } text := escapeText(n.Data) p.printTextWithSourcemap(text, n.Loc[0]) return case ElementNode: // No-op. 
case CommentNode: start := n.Loc[0].Start - 4 p.addSourceMapping(loc.Loc{Start: start}) p.print("") return case DoctypeNode: // Doctype doesn't get printed because the Astro runtime always appends it return case RawNode: p.print(n.Data) return case RenderHeadNode: p.printMaybeRenderHead() *opts.printedMaybeHead = true return } // Tip! Comment this block out to debug expressions if n.Expression { if n.FirstChild == nil || emptyTextNodeWithoutSiblings(n.FirstChild) { p.print("${(void 0)") } else if expressionOnlyHasComment(n) { // we do not print expressions that only contain comment blocks return } else { p.print("${") } for c := n.FirstChild; c != nil; c = c.NextSibling { p.addSourceMapping(c.Loc[0]) if c.Type == TextNode { p.printTextWithSourcemap(c.Data, c.Loc[0]) continue } // Print the opening of a tagged render function before // a node, only when it meets either of these conditions: // - It does not have a previous sibling. // - It has a text node that contains more than just whitespace. // - It is the first child of its parent expression. if c.PrevSibling == nil || c.PrevSibling == n.FirstChild || (c.PrevSibling.Type == TextNode && strings.TrimSpace(c.PrevSibling.Data) != "") { p.printTemplateLiteralOpen() } render1(p, c, RenderOptions{ isRoot: false, isExpression: true, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) // Print the closing of a tagged render function after // a node, only when it meets either of these conditions: // - It does not have a next sibling. // - It has a text node that contains more than just whitespace. // - It is the last child of its parent expression. 
if c.NextSibling == nil || c.NextSibling == n.LastChild || (c.NextSibling.Type == TextNode && strings.TrimSpace(c.NextSibling.Data) != "") { p.printTemplateLiteralClose() } } if len(n.Loc) >= 2 { p.addSourceMapping(n.Loc[1]) } p.print("}") return } isFragment := n.Fragment isComponent := isFragment || n.Component || n.CustomElement isClientOnly := isComponent && transform.HasAttr(n, "client:only") isSlot := n.DataAtom == atom.Slot isImplicit := false isHandledScript := n.HandledScript for _, a := range n.Attr { if isSlot && a.Key == "is:inline" { isSlot = false } if transform.IsImplicitNodeMarker(a) { isImplicit = true } } p.addSourceMapping(n.Loc[0]) switch true { case isFragment: p.print(fmt.Sprintf("${%s(%s,'%s',", RENDER_COMPONENT, RESULT, "Fragment")) case isComponent: p.print(fmt.Sprintf("${%s(%s,'%s',", RENDER_COMPONENT, RESULT, n.Data)) case isSlot: p.print(fmt.Sprintf("${%s(%s,%s[", RENDER_SLOT, RESULT, SLOTS)) case isHandledScript: // import '/src/pages/index.astro?astro&type=script&index=0&lang.ts'; scriptUrl := fmt.Sprintf("%s?astro&type=script&index=%v&lang.ts", p.opts.Filename, *opts.scriptCount) resolvedScriptUrl := transform.ResolveIdForMatch(scriptUrl, &p.opts) escapedScriptUrl := escapeDoubleQuote(resolvedScriptUrl) p.print(fmt.Sprintf("${%s(%s,\"%s\")}", RENDER_SCRIPT, RESULT, escapedScriptUrl)) *opts.scriptCount++ return case isImplicit: // do nothing default: // Before the first non-head element, inject $$maybeRenderHead($$result) // This is for pages that do not contain an explicit head element switch n.DataAtom { case atom.Html, atom.Head, atom.Base, atom.Basefont, atom.Bgsound, atom.Link, atom.Meta, atom.Noframes, atom.Script, atom.Style, atom.Template, atom.Title: break default: if !*opts.printedMaybeHead { *opts.printedMaybeHead = true p.printMaybeRenderHead() } } p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start - 1}) p.print("<") } p.addSourceMapping(n.Loc[0]) switch true { case isFragment: p.print("Fragment") case isClientOnly:
p.print("null") case !isSlot && n.CustomElement: p.print(fmt.Sprintf("'%s'", n.Data)) case !isSlot && !isImplicit: // Print the tag name p.print(n.Data) } p.addNilSourceMapping() if isImplicit { // do nothing } else if isComponent { maybeConvertTransition(n) p.print(",") p.printAttributesToObject(n) } else if isSlot { if len(n.Attr) == 0 { p.print(`"default"`) } else { slotted := false for _, a := range n.Attr { if a.Key != "name" { continue } switch a.Type { case QuotedAttribute: p.addSourceMapping(a.ValLoc) p.print(`"` + escapeDoubleQuote(a.Val) + `"`) slotted = true default: p.handler.AppendError(&loc.ErrorWithRange{ Code: loc.ERROR_UNSUPPORTED_SLOT_ATTRIBUTE, Text: "slot[name] must be a static string", Range: loc.Range{Loc: a.ValLoc, Len: len(a.Val)}, }) } } if !slotted { p.print(`"default"`) } } p.print(`]`) } else { maybeConvertTransition(n) for _, a := range n.Attr { if transform.IsImplicitNodeMarker(a) || a.Key == "is:inline" { continue } if a.Key == "slot" { // Walk up the tree to find the nearest non-Expression ancestor // to determine if we're inside an Astro Component (slot should be stripped) // or a CustomElement/regular HTML (slot should be preserved) parent := n.Parent for parent != nil && parent.Expression { parent = parent.Parent } if parent != nil && parent.Component { continue } // Note: if we encounter "slot" NOT inside a component, that's fine // These should be preserved in the output (e.g., for web components) p.printAttribute(a, n) } else if a.Key == "data-astro-source-file" { p.printAttribute(a, n) var l []int if n.FirstChild != nil && len(n.FirstChild.Loc) > 0 { start := n.FirstChild.Loc[0].Start if n.FirstChild.Type == TextNode { start += len(n.Data) - len(strings.TrimLeftFunc(n.Data, unicode.IsSpace)) } l = p.builder.GetLineAndColumnForLocation(loc.Loc{Start: start}) } else if len(n.Loc) > 0 { l = p.builder.GetLineAndColumnForLocation(n.Loc[0]) } if len(l) > 0 { p.printAttribute(Attribute{ Key: "data-astro-source-loc", Type: 
QuotedAttribute, Val: fmt.Sprintf("%d:%d", l[0], l[1]), }, n) } p.addSourceMapping(n.Loc[0]) } else { p.printAttribute(a, n) p.addSourceMapping(n.Loc[0]) } } p.addSourceMapping(n.Loc[0]) p.print(">") } if voidElements[n.Data] { if n.FirstChild != nil { // return fmt.Errorf("html: void element <%s> has child nodes", n.Data) } return } // Add initial newline where there is danger of a newline being ignored. if c := n.FirstChild; c != nil && c.Type == TextNode && strings.HasPrefix(c.Data, "\n") { switch n.Data { case "pre", "listing", "textarea": p.print("\n") } } if n.DataAtom == atom.Script || n.DataAtom == atom.Style { p.printDefineVarsOpen(n) } // Render any child nodes. switch n.Data { case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp": for c := n.FirstChild; c != nil; c = c.NextSibling { if c.Type == TextNode { p.printTextWithSourcemap(escapeText(c.Data), c.Loc[0]) } else { render1(p, c, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } } // if n.Data == "plaintext" { // // Don't render anything else. must be the // // last element in the file, with no closing tag.
// return // } default: isAllWhiteSpace := false if isComponent || isSlot { isAllWhiteSpace = true for c := n.FirstChild; c != nil; c = c.NextSibling { isAllWhiteSpace = c.Type == TextNode && strings.TrimSpace(c.Data) == "" if !isAllWhiteSpace { break } } } if !isAllWhiteSpace { switch true { case n.CustomElement: p.print(`,{`) p.print(fmt.Sprintf(`"%s": () => `, "default")) p.printTemplateLiteralOpen() for c := n.FirstChild; c != nil; c = c.NextSibling { render1(p, c, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } p.printTemplateLiteralClose() p.print(`,}`) case isComponent: p.print(`,`) slottedChildren := make(map[string][]*Node) conditionalSlottedChildren := make([][]*Node, 0) for c := n.FirstChild; c != nil; c = c.NextSibling { slotProp := `"default"` for _, a := range c.Attr { if a.Key == "slot" { if a.Type == QuotedAttribute { slotProp = fmt.Sprintf(`"%s"`, escapeDoubleQuote(a.Val)) } else if a.Type == ExpressionAttribute { slotProp = fmt.Sprintf(`[%s]`, a.Val) } else { p.handler.AppendError(&loc.ErrorWithRange{ Code: loc.ERROR_UNSUPPORTED_SLOT_ATTRIBUTE, Text: "slot[name] must be a static string", Range: loc.Range{Loc: a.ValLoc, Len: len(a.Val)}, }) } } } if c.Expression { nestedSlots := make([]string, 0) for c1 := c.FirstChild; c1 != nil; c1 = c1.NextSibling { for _, a := range c1.Attr { if a.Key == "slot" { if a.Type == QuotedAttribute { nestedSlotProp := fmt.Sprintf(`"%s"`, escapeDoubleQuote(a.Val)) nestedSlots = append(nestedSlots, nestedSlotProp) } else if a.Type == ExpressionAttribute { nestedSlotProp := fmt.Sprintf(`[%s]`, a.Val) nestedSlots = append(nestedSlots, nestedSlotProp) } else { panic(`unknown slot attribute type`) } } } } if len(nestedSlots) == 1 { slotProp = nestedSlots[0] slottedChildren[slotProp] = append(slottedChildren[slotProp], c) continue } else if len(nestedSlots) > 1 { 
conditionalChildren := make([]*Node, 0) child_loop: for c1 := c.FirstChild; c1 != nil; c1 = c1.NextSibling { for _, a := range c1.Attr { if a.Key == "slot" { if a.Type == QuotedAttribute { nestedSlotProp := fmt.Sprintf(`"%s"`, escapeDoubleQuote(a.Val)) nestedSlots = append(nestedSlots, nestedSlotProp) conditionalChildren = append(conditionalChildren, &Node{Type: TextNode, Data: fmt.Sprintf("{%s: () => ", nestedSlotProp), Loc: make([]loc.Loc, 1)}) conditionalChildren = append(conditionalChildren, c1) conditionalChildren = append(conditionalChildren, &Node{Type: TextNode, Data: "}", Loc: make([]loc.Loc, 1)}) continue child_loop } else if a.Type == ExpressionAttribute { nestedSlotProp := fmt.Sprintf(`[%s]`, a.Val) nestedSlots = append(nestedSlots, nestedSlotProp) conditionalChildren = append(conditionalChildren, &Node{Type: TextNode, Data: fmt.Sprintf("{%s: () => ", nestedSlotProp), Loc: make([]loc.Loc, 1)}) conditionalChildren = append(conditionalChildren, c1) conditionalChildren = append(conditionalChildren, &Node{Type: TextNode, Data: "}", Loc: make([]loc.Loc, 1)}) continue child_loop } else { panic(`unknown slot attribute type`) } } } conditionalChildren = append(conditionalChildren, c1) } conditionalSlottedChildren = append(conditionalSlottedChildren, conditionalChildren) continue } } // Only slot ElementNodes (except expressions containing only comments) or non-empty TextNodes! 
// CommentNode, JSX comments and others should not be slotted if expressionOnlyHasComment(c) { continue } if c.Type == ElementNode || c.Type == TextNode && !emptyTextNodeWithoutSiblings(c) { slottedChildren[slotProp] = append(slottedChildren[slotProp], c) } } // fix: sort keys for stable output slottedKeys := make([]string, 0, len(slottedChildren)) for k := range slottedChildren { slottedKeys = append(slottedKeys, k) } sort.Strings(slottedKeys) if len(conditionalSlottedChildren) > 0 { p.print(`$$mergeSlots(`) } p.print(`{`) numberOfSlots := len(slottedKeys) if numberOfSlots > 0 { childrenLoop: for _, slotProp := range slottedKeys { children := slottedChildren[slotProp] // If there are named slots, the default slot cannot be only whitespace if numberOfSlots > 1 && slotProp == "\"default\"" { // Loop over the children and verify that at least one non-whitespace node exists. foundNonWhitespace := false for _, child := range children { if child.Type != TextNode || strings.TrimSpace(child.Data) != "" { foundNonWhitespace = true } } if !foundNonWhitespace { continue childrenLoop } } // If selected, pass through result object on the Astro side if opts.opts.ResultScopedSlot { p.print(fmt.Sprintf(`%s: %s($$result) => `, slotProp, p.getAsyncFuncPrefix())) } else { p.print(fmt.Sprintf(`%s: %s() => `, slotProp, p.getAsyncFuncPrefix())) } p.printTemplateLiteralOpen() for _, child := range children { render1(p, child, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } p.printTemplateLiteralClose() p.print(`,`) } } p.print(`}`) if len(conditionalSlottedChildren) > 0 { for _, children := range conditionalSlottedChildren { p.print(",") for _, child := range children { if child.Type == ElementNode { p.printTemplateLiteralOpen() } render1(p, child, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: 
opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) if child.Type == ElementNode { p.printTemplateLiteralClose() } } } p.print(`)`) } case isSlot: p.print(`,`) p.printTemplateLiteralOpen() for c := n.FirstChild; c != nil; c = c.NextSibling { render1(p, c, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } p.printTemplateLiteralClose() default: for c := n.FirstChild; c != nil; c = c.NextSibling { render1(p, c, RenderOptions{ isRoot: false, isExpression: opts.isExpression, depth: depth + 1, opts: opts.opts, cssLen: opts.cssLen, printedMaybeHead: opts.printedMaybeHead, scriptCount: opts.scriptCount, }) } } } } if len(n.Loc) == 2 { p.addSourceMapping(n.Loc[1]) } else { p.addSourceMapping(n.Loc[0]) } if n.DataAtom == atom.Script || n.DataAtom == atom.Style { p.printDefineVarsClose(n) } if isComponent || isSlot { p.print(")}") } else if !isImplicit { if n.DataAtom == atom.Head { *opts.printedMaybeHead = true p.printRenderHead() } start := 2 if len(n.Loc) > 0 { start = n.Loc[0].Start } if len(n.Loc) >= 2 { start = n.Loc[1].Start } start -= 2 p.addSourceMapping(loc.Loc{Start: start}) p.print(`</`) start += 2 p.addSourceMapping(loc.Loc{Start: start}) p.print(n.Data) start += len(n.Data) p.addSourceMapping(loc.Loc{Start: start}) p.print(`>`) } } // Section 12.1.2, "Elements", gives this list of void elements. Void elements // are those that can't have any contents. // nolint var voidElements = map[string]bool{ "area": true, "base": true, "br": true, "col": true, "embed": true, "hr": true, "img": true, "input": true, "keygen": true, // "keygen" has been removed from the spec, but are kept here for backwards compatibility. 
"link": true, "meta": true, "param": true, "selectedcontent": true, "source": true, "track": true, "wbr": true, } ================================================ FILE: internal/printer/print-to-json.go ================================================ package printer import ( "fmt" "regexp" "strings" . "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/sourcemap" "github.com/withastro/compiler/internal/t" "github.com/withastro/compiler/internal/transform" ) type ASTPosition struct { Start ASTPoint `json:"start,omitempty"` End ASTPoint `json:"end,omitempty"` } type ASTPoint struct { Line int `json:"line,omitempty"` Column int `json:"column,omitempty"` Offset int `json:"offset,omitempty"` } type ASTNode struct { Type string `json:"type"` Name string `json:"name"` Value string `json:"value,omitempty"` Attributes []ASTNode `json:"attributes,omitempty"` Directives []ASTNode `json:"directives,omitempty"` Children []ASTNode `json:"children,omitempty"` Position ASTPosition `json:"position,omitempty"` // Attributes only Kind string `json:"kind,omitempty"` Raw string `json:"raw,omitempty"` } func escapeForJSON(value string) string { backslash := regexp.MustCompile(`\\`) value = backslash.ReplaceAllString(value, `\\`) newlines := regexp.MustCompile(`\n`) value = newlines.ReplaceAllString(value, `\n`) doublequotes := regexp.MustCompile(`"`) value = doublequotes.ReplaceAllString(value, `\"`) r := regexp.MustCompile(`\r`) value = r.ReplaceAllString(value, `\r`) t := regexp.MustCompile(`\t`) value = t.ReplaceAllString(value, `\t`) f := regexp.MustCompile(`\f`) value = f.ReplaceAllString(value, `\f`) return value } func (n ASTNode) String() string { str := fmt.Sprintf(`{"type":"%s"`, n.Type) if n.Kind != "" { str += fmt.Sprintf(`,"kind":"%s"`, n.Kind) } if n.Name != "" { str += fmt.Sprintf(`,"name":"%s"`, escapeForJSON(n.Name)) } else if n.Type == "fragment" { str += `,"name":""` } if n.Value != "" || 
n.Type == "attribute" { str += fmt.Sprintf(`,"value":"%s"`, escapeForJSON(n.Value)) } if n.Raw != "" || n.Type == "attribute" { str += fmt.Sprintf(`,"raw":"%s"`, escapeForJSON(n.Raw)) } if len(n.Attributes) > 0 { str += `,"attributes":[` for i, attr := range n.Attributes { str += attr.String() if i < len(n.Attributes)-1 { str += "," } } str += `]` } else if n.Type == "element" || n.Type == "component" || n.Type == "custom-element" || n.Type == "fragment" { str += `,"attributes":[]` } if len(n.Children) > 0 { str += `,"children":[` for i, node := range n.Children { str += node.String() if i < len(n.Children)-1 { str += "," } } str += `]` } else if n.Type == "element" || n.Type == "component" || n.Type == "custom-element" || n.Type == "fragment" { str += `,"children":[]` } if n.Position.Start.Line != 0 { str += `,"position":{` str += fmt.Sprintf(`"start":{"line":%d,"column":%d,"offset":%d}`, n.Position.Start.Line, n.Position.Start.Column, n.Position.Start.Offset) if n.Position.End.Line != 0 { str += fmt.Sprintf(`,"end":{"line":%d,"column":%d,"offset":%d}`, n.Position.End.Line, n.Position.End.Column, n.Position.End.Offset) } str += "}" } str += "}" return str } func PrintToJSON(sourcetext string, n *Node, opts t.ParseOptions) PrintResult { p := &printer{ builder: sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(sourcetext, len(strings.Split(sourcetext, "\n")))), sourcetext: sourcetext, } root := ASTNode{} renderNode(p, &root, n, opts) doc := root.Children[0] return PrintResult{ Output: []byte(doc.String()), } } func locToPoint(p *printer, loc loc.Loc) ASTPoint { offset := loc.Start info := p.builder.GetLineAndColumnForLocation(loc) line := info[0] column := info[1] return ASTPoint{ Line: line, Column: column, Offset: offset, } } func positionAt(p *printer, n *Node, opts t.ParseOptions) ASTPosition { if !opts.Position { return ASTPosition{} } if len(n.Loc) == 1 { s := n.Loc[0] start := locToPoint(p, s) return ASTPosition{ Start: start, } } if 
	len(n.Loc) == 2 {
		s := n.Loc[0]
		e := n.Loc[1]
		// `s` and `e` mark the start location of the tag name
		if n.Type == ElementNode {
			// this adjusts `e` to be the last index of the end tag for self-closing tags
			if s.Start == e.Start {
				e.Start = e.Start + len(n.Data) + 2
			} else {
				// this adjusts `e` to be the last index of the end tag for normally closed tags
				e.Start = e.Start + len(n.Data) + 1
			}
			if s.Start != 0 {
				// this adjusts `s` to be the first index of the element tag
				s.Start = s.Start - 1
			}
		}
		start := locToPoint(p, s)
		end := locToPoint(p, e)
		return ASTPosition{
			Start: start,
			End:   end,
		}
	}
	return ASTPosition{}
}

// attrPositionAt returns the (start-only) position of an attribute, keyed off
// the location of the attribute name; empty when positions are disabled.
func attrPositionAt(p *printer, n *Attribute, opts t.ParseOptions) ASTPosition {
	if !opts.Position {
		return ASTPosition{}
	}
	k := n.KeyLoc
	start := locToPoint(p, k)
	return ASTPosition{
		Start: start,
	}
}

// renderNode converts one parser Node (and, recursively, its children) into
// an ASTNode appended to parent.Children. Nodes marked implicit by the parser
// are skipped entirely: their children are lifted into the parent instead.
func renderNode(p *printer, parent *ASTNode, n *Node, opts t.ParseOptions) {
	isImplicit := false
	for _, a := range n.Attr {
		if transform.IsImplicitNodeMarker(a) {
			isImplicit = true
			break
		}
	}
	hasChildren := n.FirstChild != nil
	if isImplicit {
		// Implicit node: render children directly into the parent.
		if hasChildren {
			for c := n.FirstChild; c != nil; c = c.NextSibling {
				renderNode(p, parent, c, opts)
			}
		}
		return
	}
	var node ASTNode
	node.Position = positionAt(p, n, opts)
	if n.Type == ElementNode {
		if n.Expression {
			node.Type = "expression"
		} else {
			node.Name = n.Data
			if n.Component {
				node.Type = "component"
			} else if n.CustomElement {
				node.Type = "custom-element"
			} else if n.Fragment {
				node.Type = "fragment"
			} else {
				node.Type = "element"
			}
			for _, attr := range n.Attr {
				name := attr.Key
				if attr.Namespace != "" {
					name = fmt.Sprintf("%s:%s", attr.Namespace, attr.Key)
				}
				position := attrPositionAt(p, &attr, opts)
				raw := ""
				if attr.Type == QuotedAttribute || attr.Type == TemplateLiteralAttribute {
					// Recover the raw source text of the value, widening to
					// include the opening/closing quote depending on whether
					// the preceding character is `=`.
					// NOTE(review): `start` underflows if ValLoc.Start is 0 —
					// presumably unreachable for quoted values; confirm.
					start := attr.ValLoc.Start - 1
					end := attr.ValLoc.Start + len(attr.Val)
					char := p.sourcetext[start]
					if char == '=' {
						start += 1
					} else {
						end += 1
					}
					raw = strings.TrimSpace(p.sourcetext[start:end])
				}
				attrNode := ASTNode{
					Type:
"attribute", Kind: attr.Type.String(), Position: position, Name: name, Value: attr.Val, Raw: raw, } node.Attributes = append(node.Attributes, attrNode) } } } else { node.Type = n.Type.String() if n.Type == TextNode || n.Type == CommentNode || n.Type == DoctypeNode { node.Value = n.Data } } if n.Type == FrontmatterNode && hasChildren { node.Value = n.FirstChild.Data } else { if !isImplicit && hasChildren { for c := n.FirstChild; c != nil; c = c.NextSibling { renderNode(p, &node, c, opts) } } } parent.Children = append(parent.Children, node) } ================================================ FILE: internal/printer/print-to-tsx.go ================================================ package printer import ( "fmt" "slices" "strings" "unicode" . "github.com/withastro/compiler/internal" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/helpers" "github.com/withastro/compiler/internal/js_scanner" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/sourcemap" "github.com/withastro/compiler/internal/transform" "golang.org/x/net/html/atom" ) func getTSXPrefix() string { return "/* @jsxImportSource astro */\n\n" } type TSXOptions struct { IncludeScripts bool IncludeStyles bool } func PrintToTSX(sourcetext string, n *Node, opts TSXOptions, transformOpts transform.TransformOptions, h *handler.Handler) PrintResult { p := &printer{ sourcetext: sourcetext, opts: transformOpts, builder: sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(sourcetext, len(strings.Split(sourcetext, "\n")))), } p.print(getTSXPrefix()) renderTsx(p, n, &opts) return PrintResult{ Output: p.output, SourceMapChunk: p.builder.GenerateChunk(p.output), TSXRanges: finalizeRanges(string(p.output), p.ranges), } } func finalizeRanges(content string, ranges TSXRanges) TSXRanges { chunkBuilder := sourcemap.MakeChunkBuilder(nil, sourcemap.GenerateLineOffsetTables(content, 
len(strings.Split(content, "\n")))) return TSXRanges{ Frontmatter: loc.TSXRange{ Start: chunkBuilder.OffsetAt(loc.Loc{Start: ranges.Frontmatter.Start}), End: chunkBuilder.OffsetAt(loc.Loc{Start: ranges.Frontmatter.End}), }, Body: loc.TSXRange{ Start: chunkBuilder.OffsetAt(loc.Loc{Start: ranges.Body.Start}), End: chunkBuilder.OffsetAt(loc.Loc{Start: ranges.Body.End}), }, // Scripts and styles are already using the proper positions Scripts: ranges.Scripts, Styles: ranges.Styles, } } type TSXRanges struct { Frontmatter loc.TSXRange `js:"frontmatter"` Body loc.TSXRange `js:"body"` Scripts []TSXExtractedTag `js:"scripts"` Styles []TSXExtractedTag `js:"styles"` } var htmlEvents = map[string]bool{ "onabort": true, "onafterprint": true, "onauxclick": true, "onbeforematch": true, "onbeforeprint": true, "onbeforeunload": true, "onblur": true, "oncancel": true, "oncanplay": true, "oncanplaythrough": true, "onchange": true, "onclick": true, "onclose": true, "oncontextlost": true, "oncontextmenu": true, "oncontextrestored": true, "oncopy": true, "oncuechange": true, "oncut": true, "ondblclick": true, "ondrag": true, "ondragend": true, "ondragenter": true, "ondragleave": true, "ondragover": true, "ondragstart": true, "ondrop": true, "ondurationchange": true, "onemptied": true, "onended": true, "onerror": true, "onfocus": true, "onformdata": true, "onhashchange": true, "oninput": true, "oninvalid": true, "onkeydown": true, "onkeypress": true, "onkeyup": true, "onlanguagechange": true, "onload": true, "onloadeddata": true, "onloadedmetadata": true, "onloadstart": true, "onmessage": true, "onmessageerror": true, "onmousedown": true, "onmouseenter": true, "onmouseleave": true, "onmousemove": true, "onmouseout": true, "onmouseover": true, "onmouseup": true, "onoffline": true, "ononline": true, "onpagehide": true, "onpageshow": true, "onpaste": true, "onpause": true, "onplay": true, "onplaying": true, "onpopstate": true, "onprogress": true, "onratechange": true, "onrejectionhandled": 
true, "onreset": true, "onresize": true, "onscroll": true, "onscrollend": true, "onsecuritypolicyviolation": true, "onseeked": true, "onseeking": true, "onselect": true, "onslotchange": true, "onstalled": true, "onstorage": true, "onsubmit": true, "onsuspend": true, "ontimeupdate": true, "ontoggle": true, "onunhandledrejection": true, "onunload": true, "onvolumechange": true, "onwaiting": true, "onwheel": true, } func getStyleLangFromAttrs(attrs []astro.Attribute) string { if len(attrs) == 0 { return "css" } for _, attr := range attrs { if attr.Key == "lang" { if attr.Type == astro.QuotedAttribute { return strings.TrimSpace(strings.ToLower(attr.Val)) } else { // If the lang attribute exists, but is not quoted, we can't tell what's inside of it // So we'll just return "unknown" and let the downstream client decide what to do with it return "unknown" } } } return "css" } func getScriptTypeFromAttrs(attrs []astro.Attribute) string { if len(attrs) == 0 { return "processed-module" } for _, attr := range attrs { // If the script tag has `is:raw`, we can't tell what's inside of it // so the downstream client will decide what to do with it (e.g. ignore it, treat as inline, try to guess the type, etc.) 
if attr.Key == "is:raw" { return "raw" } if attr.Key == "type" { if attr.Type == astro.QuotedAttribute { normalizedType := strings.TrimSpace(strings.ToLower(attr.Val)) // If the script tag has `type="module"`, it's not processed, but it's still a module if normalizedType == "module" { return "module" } if ScriptJSONMimeTypes[normalizedType] { return "json" } if ScriptMimeTypes[normalizedType] { return "inline" } } // If the type is not recognized, leave it as unknown return "unknown" } } // Otherwise, it's an inline script return "inline" } type TSXExtractedTag struct { Loc loc.TSXRange `js:"position"` Type string `js:"type"` Content string `js:"content"` Lang string `js:"lang"` } func isScript(p *astro.Node) bool { return p.DataAtom == atom.Script } func isStyle(p *astro.Node) bool { return p.DataAtom == atom.Style } // Has is:raw attribute func isRawText(p *astro.Node) bool { for _, a := range p.Attr { if a.Key == "is:raw" { return true } } return false } var ScriptMimeTypes map[string]bool = map[string]bool{ "module": true, "text/typescript": true, "application/javascript": true, "text/partytown": true, "application/node": true, } var ScriptJSONMimeTypes map[string]bool = map[string]bool{ "application/json": true, "application/ld+json": true, "importmap": true, "speculationrules": true, } // This is not perfect (as in, you wouldn't use this to make a spec compliant parser), but it's good enough // for the real world. Thankfully, JSX is also a bit more lax than JavaScript, so we can spare some work. 
func isValidTSXAttribute(a Attribute) bool { if a.Type == SpreadAttribute { return true } for i, ch := range a.Key { if i == 0 && !isValidFirstRune(ch) { return false } // See https://tc39.es/ecma262/#prod-IdentifierName if i != 0 && !(isValidFirstRune(ch) || unicode.In(ch, unicode.Mn, unicode.Mc, unicode.Nd, unicode.Pc)) && // : is allowed inside TSX attributes, for namespaces purpose // See https://facebook.github.io/jsx/#prod-JSXNamespacedName // - is allowed inside TSX attributes, for custom attributes // See https://facebook.github.io/jsx/#prod-JSXIdentifier ch != ':' && ch != '-' { return false } } return true } // See https://tc39.es/ecma262/#prod-IdentifierStartChar func isValidFirstRune(r rune) bool { return r == '$' || r == '_' || unicode.In(r, unicode.Lu, unicode.Ll, unicode.Lt, unicode.Lm, unicode.Lo, unicode.Nl, ) } type TextType uint32 const ( RawText TextType = iota Text ScriptText JsonScriptText UnknownScriptText StyleText ) func getTextType(n *astro.Node) TextType { if script := n.Closest(isScript); script != nil { attr := astro.GetAttribute(script, "type") if attr == nil || ScriptMimeTypes[strings.ToLower(attr.Val)] { return ScriptText } // There's no difference between JSON and unknown script types in the result JSX at this time // however, we might want to add some special handling in the future, so we keep them separate if ScriptJSONMimeTypes[strings.ToLower(attr.Val)] { return JsonScriptText } return UnknownScriptText } if style := n.Closest(isStyle); style != nil { return StyleText } if n.Closest(isRawText) != nil { return RawText } return Text } func renderTsx(p *printer, n *Node, o *TSXOptions) { // Root of the document, print all children if n.Type == DocumentNode { source := []byte(p.sourcetext) props := js_scanner.GetPropsType(source) hasGetStaticPaths := js_scanner.HasGetStaticPaths(source) hasChildren := false startLen := len(p.output) for c := n.FirstChild; c != nil; c = c.NextSibling { // This checks for the first node that comes 
*after* the frontmatter // to ensure that the statement is properly closed with a `;`. // Without this, TypeScript can get tripped up by the body of our file. if c.PrevSibling != nil && c.PrevSibling.Type == FrontmatterNode { buf := strings.TrimSpace(string(p.output)) if len(buf)-len(getTSXPrefix()) > 1 { char := rune(buf[len(buf)-1:][0]) // If the existing buffer ends with any character other than ;, we need to add a `;` if char != ';' { p.addNilSourceMapping() p.print("{};") } } // We always need to start the body with `<Fragment>` p.addNilSourceMapping() p.print("<Fragment>\n") // Update the start location of the body to the start of the first child startLen = len(p.output) hasChildren = true } if c.PrevSibling == nil && c.Type != FrontmatterNode { p.addNilSourceMapping() p.print("<Fragment>\n") startLen = len(p.output) hasChildren = true } renderTsx(p, c, o) } p.addSourceMapping(loc.Loc{Start: len(p.sourcetext)}) p.print("\n") p.addNilSourceMapping() p.setTSXBodyRange(loc.TSXRange{ Start: startLen, End: len(p.output), }) // Only close the body with `</Fragment>` if we printed a body if hasChildren { p.print("</Fragment>\n") } componentName := getTSXComponentName(p.opts.Filename) propsIdent := props.Ident paramsIdent := "" if hasGetStaticPaths { paramsIdent = "ASTRO__Get<ASTRO__InferredGetStaticPath, 'params'>" if propsIdent == "Record<string, any>" { propsIdent = "ASTRO__MergeUnion<ASTRO__Get<ASTRO__InferredGetStaticPath, 'props'>>" } } p.print(fmt.Sprintf("export default function %s%s(_props: %s%s): any {}\n", componentName, props.Statement, propsIdent, props.Generics)) if hasGetStaticPaths { p.printf(`type ASTRO__ArrayElement<ArrayType extends readonly unknown[]> = ArrayType extends readonly (infer ElementType)[] ? ElementType : never; type ASTRO__Flattened<T> = T extends Array<infer U> ? 
ASTRO__Flattened<U> : T; type ASTRO__InferredGetStaticPath = ASTRO__Flattened<ASTRO__ArrayElement<Awaited<ReturnType<typeof getStaticPaths>>>>; type ASTRO__MergeUnion<T, K extends PropertyKey = T extends unknown ? keyof T : never> = T extends unknown ? T & { [P in Exclude<K, keyof T>]?: never } extends infer O ? { [P in keyof O]: O[P] } : never : never; type ASTRO__Get<T, K> = T extends undefined ? undefined : K extends keyof T ? T[K] : never;%s`, "\n") } if propsIdent != "Record<string, any>" { p.printf(`/** * Astro global available in all contexts in .astro files * * [Astro documentation](https://docs.astro.build/reference/api-reference/#astro-global) */ declare const Astro: Readonly<import('astro').AstroGlobal<%s, typeof %s`, propsIdent, componentName) if paramsIdent != "" { p.printf(", %s", paramsIdent) } p.print(">>") } return } if n.Type == FrontmatterNode { p.addSourceMapping(loc.Loc{Start: 0}) frontmatterStart := len(p.output) for c := n.FirstChild; c != nil; c = c.NextSibling { if c.Type == TextNode { if len(c.Loc) > 0 { p.addSourceMapping(c.Loc[0]) } // Find top-level returns and transform them to throws // This is needed because top-level returns are valid in Astro frontmatter // but cause TypeScript parsing errors in the generated TSX topLevelReturns := js_scanner.FindTopLevelReturns([]byte(c.Data)) if len(topLevelReturns) > 0 { // Build a new string with top-level returns replaced by throws // Note: We use "throw " (with extra space) to preserve the same character // count as "return" so sourcemaps remain accurate for the rest of the code newString := make([]byte, 0, len(c.Data)) i := 0 for i < len(c.Data) { if slices.Contains(topLevelReturns, i) { newString = append(newString, []byte("throw ")...) 
i += len("return") } else { newString = append(newString, c.Data[i]) i++ } } p.printTextWithSourcemap(string(newString), c.Loc[0]) } else { p.printTextWithSourcemap(c.Data, c.Loc[0]) } } else { renderTsx(p, c, o) } } if n.FirstChild != nil { p.addSourceMapping(loc.Loc{Start: n.FirstChild.Loc[0].Start + len(n.FirstChild.Data)}) p.print("") p.addSourceMapping(loc.Loc{Start: n.FirstChild.Loc[0].Start + len(n.FirstChild.Data) + 3}) p.println("") } p.setTSXFrontmatterRange(loc.TSXRange{ Start: frontmatterStart, End: len(p.output), }) return } switch n.Type { case TextNode: textType := getTextType(n) if textType == ScriptText { p.addNilSourceMapping() if o.IncludeScripts { p.print("\n{() => {") p.printTextWithSourcemap(n.Data, n.Loc[0]) p.addNilSourceMapping() p.print("}}\n") } p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start + len(n.Data)}) } else if textType == StyleText || textType == JsonScriptText || textType == RawText || textType == UnknownScriptText { p.addNilSourceMapping() if (textType == StyleText && o.IncludeStyles) || ((textType == JsonScriptText || textType == UnknownScriptText) && o.IncludeScripts) || textType == RawText { p.print("{`") p.printTextWithSourcemap(escapeText(n.Data), n.Loc[0]) p.addNilSourceMapping() p.print("`}") } p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start + len(n.Data)}) } else { p.printEscapedJSXTextWithSourcemap(n.Data, n.Loc[0]) } return case ElementNode: // No-op. 
case CommentNode: // p.addSourceMapping(n.Loc[0]) p.addNilSourceMapping() p.print("{/**") if !unicode.IsSpace(rune(n.Data[0])) { // always add a space after the opening comment p.print(" ") } p.addSourceMapping(n.Loc[0]) p.printTextWithSourcemap(escapeBraces(n.Data), n.Loc[0]) p.addNilSourceMapping() p.print("*/}") return default: return } if n.Expression { p.addSourceMapping(n.Loc[0]) if n.FirstChild == nil { p.print("{(void 0)") } else { p.print("{") } start := n.Loc[0].Start + 1 p.addSourceMapping(loc.Loc{Start: start}) for c := n.FirstChild; c != nil; c = c.NextSibling { if c.Type == TextNode { if c == n.FirstChild { p.printTextWithSourcemap(c.Data, loc.Loc{Start: start}) } else { p.printTextWithSourcemap(c.Data, c.Loc[0]) } continue } if c.PrevSibling == nil || c.PrevSibling.Type == TextNode { p.addNilSourceMapping() p.print(`<Fragment>`) } renderTsx(p, c, o) if c.NextSibling == nil || c.NextSibling.Type == TextNode { p.addNilSourceMapping() p.print(`</Fragment>`) } } if len(n.Loc) > 1 { p.addSourceMapping(n.Loc[1]) } else { p.addSourceMapping(n.Loc[0]) } p.print("}") return } isImplicit := false for _, a := range n.Attr { if transform.IsImplicitNodeMarker(a) { isImplicit = true break } } if isImplicit { // Render any child nodes for c := n.FirstChild; c != nil; c = c.NextSibling { renderTsx(p, c, o) } return } p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start - 1}) p.print("<") p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start}) p.print(n.Data) p.addSourceMapping(loc.Loc{Start: n.Loc[0].Start + len(n.Data)}) invalidTSXAttributes := make([]Attribute, 0) endLoc := n.Loc[0].Start + len(n.Data) for _, a := range n.Attr { if !isValidTSXAttribute(a) { invalidTSXAttributes = append(invalidTSXAttributes, a) continue } offset := 1 if a.Type != astro.ShorthandAttribute && a.Type != astro.SpreadAttribute { p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start - offset}) } p.print(" ") eqStart := a.KeyLoc.Start + strings.IndexRune(p.sourcetext[a.KeyLoc.Start:], '=') if a.Type 
!= astro.ShorthandAttribute && a.Type != astro.SpreadAttribute { p.addSourceMapping(a.KeyLoc) } if a.Namespace != "" { p.print(a.Namespace) p.print(":") } switch a.Type { case astro.QuotedAttribute: p.print(a.Key) p.addSourceMapping(loc.Loc{Start: eqStart}) p.print("=") if len(a.Val) > 0 { p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start - 1}) p.print(`"`) p.printTextWithSourcemap(encodeDoubleQuote(a.Val), loc.Loc{Start: a.ValLoc.Start}) p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start + len(a.Val)}) p.print(`"`) endLoc = a.ValLoc.Start + len(a.Val) + 1 } else { p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start - 1}) p.print(`"`) p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start}) p.print(`"`) endLoc = a.ValLoc.Start } if _, ok := htmlEvents[a.Key]; ok { p.addTSXScript(p.builder.OffsetAt(a.ValLoc), p.builder.OffsetAt(loc.Loc{Start: endLoc}), a.Val, "event-attribute") } if a.Key == "style" { p.addTSXStyle(p.builder.OffsetAt(a.ValLoc), p.builder.OffsetAt(loc.Loc{Start: endLoc}), a.Val, "style-attribute", "css") } case astro.EmptyAttribute: p.print(a.Key) endLoc = a.KeyLoc.Start + len(a.Key) case astro.ExpressionAttribute: p.print(a.Key) p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start + len(a.Key)}) p.print(`=`) p.addSourceMapping(loc.Loc{Start: eqStart + 1}) p.print(`{`) p.printTextWithSourcemap(a.Val, loc.Loc{Start: eqStart + 2}) p.addSourceMapping(loc.Loc{Start: eqStart + 2 + len(a.Val)}) p.print(`}`) endLoc = eqStart + len(a.Val) + 2 case astro.SpreadAttribute: p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start - 4}) p.print("{") p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start - 3}) p.print("...") p.printTextWithSourcemap(a.Key, a.KeyLoc) p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start + len(a.Key)}) p.print("}") endLoc = a.KeyLoc.Start + len(a.Key) + 1 case astro.ShorthandAttribute: withoutComments := helpers.RemoveComments(a.Key) if len(withoutComments) == 0 { return } p.addSourceMapping(a.KeyLoc) p.printf(a.Key) p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start - 
1}) p.printf("={") p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start}) p.print(a.Key) p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start + len(a.Key)}) p.print("}") endLoc = a.KeyLoc.Start + len(a.Key) + 1 case astro.TemplateLiteralAttribute: p.print(a.Key) p.addSourceMapping(loc.Loc{Start: eqStart}) p.print(`=`) p.addNilSourceMapping() p.print(`{`) p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start - 1}) p.print("`") p.printTextWithSourcemap(a.Val, a.ValLoc) p.addSourceMapping(loc.Loc{Start: a.ValLoc.Start + len(a.Val)}) p.print("`") p.addNilSourceMapping() p.print(`}`) endLoc = a.ValLoc.Start + len(a.Val) + 1 } p.addSourceMapping(loc.Loc{Start: endLoc}) } for i, a := range invalidTSXAttributes { if i == 0 { p.print(" {...{") } else { p.print(",") } eqStart := a.KeyLoc.Start + strings.IndexRune(p.sourcetext[a.KeyLoc.Start:], '=') p.addSourceMapping(a.KeyLoc) p.print(`"`) if a.Namespace != "" { p.print(a.Namespace) p.print(":") } switch a.Type { case astro.QuotedAttribute: p.print(a.Key) p.print(`"`) p.addSourceMapping(loc.Loc{Start: eqStart}) p.print(`:`) p.addSourceMapping(loc.Loc{Start: eqStart + 1}) p.print(`"` + encodeDoubleQuote(a.Val) + `"`) case astro.EmptyAttribute: p.print(a.Key) p.print(`"`) p.addNilSourceMapping() p.print(`:`) p.addSourceMapping(a.KeyLoc) p.print(`true`) case astro.ExpressionAttribute: p.print(a.Key) p.print(`"`) p.addSourceMapping(loc.Loc{Start: eqStart}) p.print(`:`) p.addSourceMapping(loc.Loc{Start: eqStart + 1}) p.print(`(`) p.printTextWithSourcemap(a.Val, loc.Loc{Start: eqStart + 2}) p.addSourceMapping(loc.Loc{Start: eqStart + 2 + len(a.Val)}) p.print(`)`) case astro.SpreadAttribute: // noop case astro.ShorthandAttribute: withoutComments := helpers.RemoveComments(a.Key) if len(withoutComments) == 0 { return } p.addSourceMapping(a.KeyLoc) p.print(a.Key) case astro.TemplateLiteralAttribute: p.addSourceMapping(a.KeyLoc) p.print(a.Key) p.print(`":`) p.addSourceMapping(a.ValLoc) p.print(fmt.Sprintf("`%s`", a.Val)) } if i == 
len(invalidTSXAttributes)-1 { p.addNilSourceMapping() p.print("}}") } } if len(n.Attr) == 0 { endLoc = n.Loc[0].Start + len(n.Data) } isSelfClosing := false hasLeadingSpace := false tmpLoc := endLoc leadingSpaceLoc := endLoc if len(p.sourcetext) > tmpLoc { for i := 0; i < len(p.sourcetext[tmpLoc:]); i++ { c := p.sourcetext[endLoc : endLoc+1][0] if c == '/' && len(p.sourcetext) > endLoc+1 && p.sourcetext[endLoc+1:][0] == '>' { isSelfClosing = true break } else if c == '>' { p.addSourceMapping(loc.Loc{Start: endLoc}) endLoc++ break } else if unicode.IsSpace(rune(c)) || (c == '\\' && p.sourcetext[endLoc+1:][0] == 'n') { hasLeadingSpace = true leadingSpaceLoc = endLoc endLoc++ } else { endLoc++ } } } else { endLoc++ } if hasLeadingSpace { p.addSourceMapping(loc.Loc{Start: leadingSpaceLoc}) p.print(" ") p.addSourceMapping(loc.Loc{Start: leadingSpaceLoc + 1}) } if voidElements[n.Data] && n.FirstChild == nil { p.print("/>") return } if isSelfClosing && n.FirstChild == nil { p.addSourceMapping(loc.Loc{Start: endLoc}) p.print("/>") return } p.print(">") startTagEndLoc := loc.Loc{Start: endLoc} // Render any child nodes for c := n.FirstChild; c != nil; c = c.NextSibling { renderTsx(p, c, o) if len(c.Loc) > 1 { endLoc = c.Loc[1].Start + len(c.Data) + 1 } else if len(c.Loc) == 1 { endLoc = c.Loc[0].Start + len(c.Data) } } if len(n.Loc) > 1 { endLoc = n.Loc[1].Start - 2 } else if n.LastChild != nil && n.LastChild.Expression { if len(n.LastChild.Loc) > 1 { endLoc = n.LastChild.Loc[1].Start + 1 } } if n.FirstChild != nil && (n.DataAtom == atom.Script || n.DataAtom == atom.Style) { tagContentEndLoc := loc.Loc{Start: endLoc} if endLoc > len(p.sourcetext) { // Sometimes, when tags are not closed properly, endLoc can be greater than the length of the source text, wonky stuff tagContentEndLoc.Start = len(p.sourcetext) } if n.DataAtom == atom.Script { p.addTSXScript(p.builder.OffsetAt(startTagEndLoc), p.builder.OffsetAt(tagContentEndLoc), n.FirstChild.Data, 
getScriptTypeFromAttrs(n.Attr)) } if n.DataAtom == atom.Style { p.addTSXStyle(p.builder.OffsetAt(startTagEndLoc), p.builder.OffsetAt(tagContentEndLoc), n.FirstChild.Data, "tag", getStyleLangFromAttrs(n.Attr)) } } // Special case because of trailing expression close in scripts if n.DataAtom == atom.Script { p.printf("</%s>", n.Data) return } p.addSourceMapping(loc.Loc{Start: endLoc}) p.print("</") if !isSelfClosing { endLoc += 2 p.addSourceMapping(loc.Loc{Start: endLoc}) } p.print(n.Data) if !isSelfClosing { endLoc += len(n.Data) p.addSourceMapping(loc.Loc{Start: endLoc}) } p.print(">") p.addSourceMapping(loc.Loc{Start: endLoc + 1}) } ================================================ FILE: internal/printer/print-to-tsx_test.go ================================================ package printer import ( "strings" "testing" astro "github.com/withastro/compiler/internal" handler "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/transform" ) // One of the more performance-sensitive parts of the compiler is the handling of multibytes characters, this benchmark is an extreme case of that. func BenchmarkPrintToTSX(b *testing.B) { source := `🌘🔅🔘🍮🔭🔁💝🐄👋 🍘👽💽🌉🌒🔝 📇🍨💿🎷🎯💅📱🎭👞🎫💝🍢🕡 augue tincidunt 👠💋🌵💌🍌🏄🕂🍹📣🍟 🐞🍲👷🗻🏢🐫👣🐹🔷🎢👭🍗👃 🐴🐈📐💄 et, 👽👽🔙🐒🔙🍀 🐞🍐🎵💕🍂🍭🎬🎅 ac 🏃👳📑🐶👝🔷 🔕👄🐾👡🍢💗 🌓🔙🌔🏈🔒🔄🎹🎐 🎾🐝🌁📃💞📔🔕🐕 vel 🌟🏁💴🎾🔷📪💼👣📚 👟💻🗾🎋💁🏬🐮💑 🍈🌸🍓🍥💦 et vivamus 🍑🍫🔉🔹💽🍙 rhoncus eu 📰💫💀🌺 🔋💾🔱🔷🐟 convallis facilisi vitae mollis 📨🔚💮🔃🎀🍶 🌠📑🎹🌑📫🗽🐊💁🔼🕓 ac 🌂🌳🌺🎭🍧🐑 🔭🔉💉🍗👠🍦 🔄👔🎭🍇 🏤🍂🌝👜🔺 ornare 🔽🌰🎃💝👩 🐬🌻🍩👺🎆📣 risus 🌼👧🌒🍄💄🌆🐖👐📠 🕙🍈👞🏢👅🎽🏫👃🌾🍘🕑🎼💆🎳 🐻📵🔂🍩🕦👕🐶 💚🏮📟📈🌄👱🔚 sem 🔵💱💭💫 libero bibendum 🌿🍮🎧🍴💉 🐵🔷🍒🍜 sed metus, aliquam 🐷🐬📇👔🎴🔻🏊📴🍂🎽 🏀💡🍺🌾 💣🍇🐼🌀🐟🍂🐰 sed luctus 👾🍠👻💬📋🐈👀🌕💥🐹 consectetur commodo at 📓🐣🔮🐍🔺🍐🗻🍃🍠🔬 🏁🎬🐔🌙 🎿🎊🍳💓👹🐏 👦🐩👶💻 🔉💏📧🔲🍈📹💫🎧 🌟🍯🔭🎿 🎹🗻💳🔄🕁💂 🐩👠🌗🍹 facilisis 👍🍑💤🐕🐞🐻🏩 posuere 🎑🍢🍑🏨📗👣 ultrices. Vestibulum 🌠👰💼📐🍺💫 👘🐖🔕🔤🐖💶🐢📵👍🍪 🔸🎿🔤🍡💡🌄📁👉🎎🎆🎢🔒 📴👉🌱🐍🌓🎆🏩🐀👓💹🎴 🔴🌒🔘👡🕜 🌝🐉🐑🏮🎸🐳💉🎄 🍳🐲🔭📆🐎🔼🎐🐩💾🍈🎶💅🐜🍀. 
🍭🍄🌳🍏💍👈🌆 🔦🏦👵🌹🏊🎐📳🌃📘🌷💎📓🎼 🎳🔁🔂🗽 💅🏰🐊👂🌴 💍🕗🐘🔼🔰🏀 🎂📻🕛🍔🎄 vitae 📎🐆🍚🌲🐰 ipsum 👘👎📅🎈🌆💮🔁🎤 💺💉💉👐🔛🐛🐺🍸📭 💠👞🍨💖 integer 🐌📀🍂🍍🐼 volutpat, condimentum elit 🎌🌕🏊👼 🌛🕜🌚🕤🍎 🏢💨👓👤 duis amet 📹🕚🔏🏧🌷🍄👦 🔠🐬👬🍩📫🏧🎷🔢💆🎭🔳👈 🔇👿💈👧🐍🍱🔉 in 🌐🎲🌠🌟🐀👛🌞🎧 sed. Tristique malesuada id 🏆🍕🕚🎯🌶 📺🐘🏀🐮🔶🏀🍥👬💞🎃🌙🎥🔦🍗 👘👣🐂🎪🔂🎾 👎👮💈🗻🌿💰📩🔋💭💃 🍍🕑🔋👠🍆🍈👰🌅 orci nam 🔀🕠🏄🍣👘🐲💘🐥 🏥🎊🎯👾📅 👛🌽🏫🔃👋🐀👶💥🔳 🕥👪👑👯 🐓💡🔠💼🔳💲 👬🔁🍜💘🔌🌎🏃🌶🍏🍯🎺 🔟📨🔜🎱💓🔛 accumsan 🎢🐭🍉🍳🕜🏆🔗📝👿 🍗👑📜🎁🍇🍕🌛 🎓🌰👣📆🌋 👌🍱💏🔇🐆🌞📝💻🌺 dictumst 📡🍝🌐💤🐅🔔🐟📥🌓🌒🍅📨🏡👺🍬🐟 🔹💳🍨💡🎋💕🕜🏦👟🕒🕣💅💗 🏤🔈📀📜📙🍌 📫🕘👍💾📱 purus 🕒🐕👵🏄💗🐤🕝 pharetra adipiscing elit, non 🏬👸🍉👑 🎠🌇👰💄 🌕🗼🏮💇🐗 📌🏤🌲👹📌 🎐🕓📎🏤💾 tellus 💆🍨👂🕟💚🎋🌠🐳 🍘🍫🎵📚📟🔛🐑🏭🔄👌📏👚🏄🐞 💏🍊👾🍘👰📄💯🔑 proin 🐡🔝🐳🎻🐶🍜 🍕🌵📯🔖💅🕔💘🏁🌾💨🔲🔼🍜🍘🕂 in suscipit 🍎🎄🎬🔙👪💣🍣💯🕧 lectus 🌾🍚🍺🐆💉🎡👷 📖🔅👼🕞 🌄🌃📦🔲💘🌶💁🍯💿 senectus 👻💈🗽🍈 📜🍉🍒🍐🔖👙🔀🐅👙 🐬📷🎨👹🎬 🐷🌐🔽💨🎿🌌🌒 🎎🍊🔕🍁. 🍈🌄📧🏊📛👗🕟 🍋💌🔁🐛💫🔰👃🕑 ridiculus mattis 🕓🍌👘🌹 👅💏🍨🔯🍂🕃🌝👠🏁🔔 pellentesque elit eu, 💝🍧💯🔄📈📛🐻📆🔱📴🔸🍻🐘🎀 viverra 🌆🍉🍉🌆 📪🍕🏊🌜📺🔆🐢🎃 mi sed tellus luctus 🍜🍃🔶🗽 laoreet dui tristique 🔆🌸🐛🔣🏤📘🕜🍃🐋 pretium ultrices 🔳🏇🍏🎭🍀 🍱🐠👬📮🌗🌆💺🔆👨🌱 et 👸📁📩🔌👯👏👫👳💸 🐎💉🔱💦👴 🏪🍶🐸🔤 💔🏤👺👻🏤🎥🐽🔦👌🔡📚💡🔁🎻 🎵👡🕀🎩 🍸🐒🍭🕠🔢🎧🍚💪 🌸🍰💏🏁🎥🐕🔬💶 💃🌽💕🐭 💯🍁📭🐚📛📓🌏🔯📦 🕝👾📒📳💵 🏠💺🕔🕁📃 adipiscing nulla congue 🔖💲🕚🎰🔋👬 sem 👽👵👜👇 vehicula 📑🌎🍁💈 consectetur nulla ullamcorper enim, 🏫🍃🐢🔄🍝🕢👖💨👺💰👎💳🏠🏤 vel fermentum porttitor lacus 📱💧🔜🔰👱🎰🐉🔓🕧🍌🕙 🎴🍫🐱🐟🌖🕞🏥🍝📜🔃🏈🔡🐁🔗 💊🍐📝👭🎢🐳🐯📷🐀🔃. 🐦🍚🔵🕜🏩 id 🌔🔤💿🌻🍹 🌎🎈📢🔬🐖💢👸🎭 🍤🔵🔓🕕📪💢🔁👽💴 🐛👶🔢🎹🏄👜📌🍭🔼🎫🍯🎦💎🐦 🐨💧🍴🌊🔁 consequat pretium 👍🏬🏰👖🍥📺👛🌕 🏄🔱🔩🐏🌞🌺🐌👑 🏰🕢💱👖🐶🐥📯🎶💧🍭🔀🎾🏩🍑 massa, est 👶🍭💅🕔🍳💗🍞💚🍩🐷🍘🌄🐇 🐼👀👀🎅 📄🎁🐞💼 placerat erat 💧🌉💻🔣🐥📠🍪📻🐃🔻👠🕘🍞🍈🍔📴 dolor 📁💹🎐🍂🍉 neque, 👗🍁🍪🌵🕛🍄 🕝👔🔼🎡🔺📬 sed 💨💧🏢🐼👘🌳🎼👹🐉🕕 enim 🔏🏁👊💀🔓 🏫🐈💀🌳🔒👵🔘🎺💴🍑 👭🔲🍰💿🔪🌊 🌅🕔🎑📛🔶🔘🎑🍯 🐲📺👬📊🍒 elit, 🌽🎱💇🎥 ultricies 📡💲🐦🌁🍚🌵🍵 🎮📧🔟🍴👕🔏🌴🔊👳🗼💒🌴👍📞🔳👜🔥🐝🐾🐧🍊 🌰🗾🌂🐄 🔛🐀🍏🍞🔔📖💉📼🐥🍱 🐜🎺🍵💿👂🌋🌸 🔞🐤🔟🍀📙🏩👑 porta id 🎄💺🍙👶👪🔪👪🐭💚📗🎅🍐👡🎭 🐦🎌🐆💫 quis nulla dictumst non 📫🍵🔫🕠 🔯🔃🐷📖💙👓💍 🎬🕃🐥🏫🍛🌆🐗👅📷 📚🍭🌞🌎🕐👜👳 🏧💬💴🎆🐄🔠📄🐰📝🌈 🎩🌇🌟📙 suspendisse 🔡👫🍐🏩🌉🕚📘🐮 👐🏁👮🎭🏣 👰📤🍙🏈👓🐰🍦 lacinia diam eu vestibulum donec faucibus 🔝🎴🌷💩🍡🍜💙 🎐🔉🌛📠🏢📄🍆🎆. 
<script>console.log("That's a lot of emojis.");</script> 🔲🍀🏨👆👎🏩🏦🎹 nibh 🕞🍃🔻📨 nec pulvinar 📏🐜💻🐸🐾🐾💖 risus 🍫🕜🐑🌳📇🍋🐪🎣 neque 👝🐝🌹🍫🌌👅 👹🏈🗽🐣 💉📊🐘🔉🏆💡🌸👰🔛📼 🍺🌿🐪🔜💄🎒👟 amet vitae, morbi elit rhoncus 🐙🏫🍪🎡👕💵 🌕💁💯🐷🌾📼🐀🍬🌛🕤🐊👔🔩🍂📚💵 💖🐐👹🔼🏠🍢🎼🐧 👛📩🍯💊 📞🌒📞💽 purus 🔐📼📬🌞🍁💸 morbi 🍁🎾🎡🌋 📨🌈🔈🌆🔨📕💛🏡🏯 💅🎣👽🕦 🌴🍋🔐🔑 ut congue 🔊🐧🌻🕑 gravida 🐉👅👦👢🎉🔎💪🔄🎈 👳🏡🕥🎂 🔵🐘🔠🍑👉🔀📊🏡 🌄🍯💽📁📚🐆👆🌂🎡📖🎱👮🌽🐄 orci 📚🍣🐀🎦📪💶🌓 etiam 🌑🐁💇👬🍓📅💸👟🌕🐊🎁🏪 vehicula sed 📒📭📣🌌🐁🎲🌴🏠🔏👯📥💽🌗💲🍡🔫 🎈🏥🍇🔺💍💐🌳👎🎤🍮📭📊 🍓👠🕞🕘🍂🏣🐺🍬🐖 eget lectus 🍄🍍📉📥🌾🎴🏣 🐾🕤🍼🌃🔩🐂🕣🐉💧📊🎧🎧🌂🎠 🐵🌺📰📑🏰 👣🍸💚🐗🔜🕕🐠🕙🏇📲🌙 👫💛💽🐑💸 🔐🔝🔘🎪💻🔂 nunc, 🍹🕗🏡📤🎷 sem vitae adipiscing tempor, 🕡🍚🐈🌟👎💢🔦 🌴💿💔🎳📍🌽🎒🔭🔨 lectus 🌰🌓🗽💀🍈 est 📬💑🕁🍺 📀🏮🔫🔜🎭🌷🏀💑🍂🔵 vulputate leo eget 🍗🌴🎃🔣👲📙 📺🍈🍏💦🌅💔💌 phasellus 👨💡🔯🐃🍜🐒🔵💆🍩🐦🍬🍅🍧 🎲🍰💊🏣📁💎 🔮🎉🎱👿🐟💪🕤 🍡🐜🕚🌏. Turpis vulputate 🌗🔫📙🎽🎽🎿📓 pretium congue in arcu tincidunt. Nisi 📺💍👃🕃🐫 🐝📨🍁🕕💯💭 📬🏧🔧🌟 🏩👛🏭🌽🎮🌁 magnis porttitor 🔈🎄🌓👶🏮. 👢🌵🏬🌏🍩 rutrum egestas 💙🔠🎧🐜🎣 nisi lectus feugiat 🍀🕚🕢🌀🎰💅 💝📮🌝📃🔈 🎓📝🏮🍄📢📛🍺🔊💐 sagittis 🐖🌂🎓👒👎🔼👊📣📭👿🐦🔖🍵💺🐳 🕗🔭🐭📐👍💯 massa erat 🏧🐁🎤👔💑🍣🐢 🍐🗾🔙🍊🎭🔣👐 💊💖🌳💌💿🏯🔴🎪. Id 🐅🐦💝📱💐👓🎡 ut 🏬🔔🌟🌑 🎦💮🎩🔬 👰📵🍘🎴 👪🐩👳💆🍧 purus 💮🐦🐼🎷🔦 🎺🔇🍴📈 cras pretium volutpat, etiam risus 🐇🕔🐪🔽 👠🔮🔈🎃🎧🌄👜 vel sapien 💯👻👜🎼📜 🎽💀🏊🌉📴🍻📐💉📺🐺 vivamus lorem 📃📉🔞🕦 📣🍨🌉🔩🐺🔎👙 molestie tellus 👹💃🕕📗🔵🕢 vel 📎🐍🔅📁🔁🍚🌀💏🐦 🎱🐮🕣👋📳🎑 🕙🏡🕒📖📪👩 condimentum 🐤💐💷🕞💬💝🎨💰 amet nisl fringilla bibendum 🐘🏃🎃🐉🏰🏦🐎🎱🍅🎥🔳🎵🍠🏧🍖🔭💇 🕞🎱📂🐈👇 🍯👐🎹👘💯📗👷 💂🕘👦💘💆🌗🕃🏀🔚 🔜💃🌒🍧🔝🐹🔑💂👜🐭 🎤👌👐🏩📞🕦🔜🍙 morbi 👑🐸🌄🐹🐃🐢🍳💸 malesuada quam amet, 💰🔗👗🐰🍆🕑📮 🍋🌘🐞🍧 🔻🎺🍘👐🍬🔷 pulvinar 🔭🏬👦💆 vivamus tempus 🏧👉🕢📭🔠🌔🕧💧📊🔼 👬🐑🌘🍮🎎🐝🕃👗🔴🐽🐘🌈📺👙🕝. <div onload="console.log('It really is.')"></div> Faucibus 🎈🐋🔄📇🐡💐 🎾🎩🔹🔣🎍🐸🌳 vestibulum, 🐢🌘🕜👂💬🎑🍪 📫📊🎅🌷📝🔰🏣💭👧👽🎒📕💓👯 🎎🍁🕥🕑💗🐌📩📧🍸🌙 🕝🐵🐀🐫🎫🔰👲🍛🔵🍪 🍳🏆🎷🐐 quam 💠🎈🕓🌴🌱🍨💢🍮🕡🔡💎🐍 imperdiet placerat 🔱🐑📔🔧🌍 nisl 📢🗻🐹🔏🕕🐐💦 📈🔵📴🍳📬💕📧📓🍚 📈🌸🔱💜💎🌐🍻📘🏠💵🏡🔌📦🐑🍩🔇👅📎🐆 🐌💞📺🐫🍷🌍💒🌸🔎🌇📠🔂🌼💎🍟 ut vel et 🎩👖💾👢🎵💂 🌂📱💳🐨🍲 🍔🔵🏃🎦🕐🌟🌐🔑 tristique vel 🌃💋👉🔰. Tortor sit 🌓👑🔓🐀🌹 tempus 🍸🍹📔🎂🍺🏀 consequat ornare 👽👽🍃📗🌘🍍 🔯🌲📝📥🍐🏣🐸 🍔📝📞🍣 💩🔥💨👋🔹 🌕💊🏪🕡 🏇🏉🐵👢🎳🕛🍸 🐈🏇🏭🕁🍬 rhoncus 📛🍁🔨💶 bibendum 🎋👲🏤📰🍐 🌸📙🍏🌠 🎿🎀👡💲📋💦🐵🔑🕡🕞🍥🍍👧🔘💡 arcu 💣👍🌶👬🌹🔒🌁📠🕖🎓📌🎩📫💬 👋🌚🌶💶🍊💫🔁📲🎺🐮💙🍟 🕦🌁🐡🍵🍒 🍁🍜🍪🔳📞💝💻🎶📦🔵📯🏦🐎 lobortis malesuada 💇💸🏰💅. 
🌐🐕🐂📇 🎒💨🔙🐉🎹🔥🔗📴👥🎈📒🔸💍🔇🌙🕁🐊🏣💆💗🔽 tortor 🔵🐚🍓🌱🌆🐀🐻 🔓👦🍌🌔🍯👐🕣 🌅👇📝💰💝 condimentum 🍋💉🐞📆🍲👢🐬💌🎤 🎢👷👑👆 💱🎒🏬🎫 🗾💝👎👄 💊🔅🔙🐮🍗🐥 nulla adipiscing 🎦👿🐞🔋📜👵 🏄🐷🎵👾 🎿🍒🌲💄📚🌔📭👄👿🍱📷💮🔀🍄 velit.` for i := 0; i < b.N; i++ { h := handler.NewHandler(source, "AstroBenchmark") var doc *astro.Node doc, err := astro.ParseWithOptions(strings.NewReader(source), astro.ParseOptionWithHandler(h), astro.ParseOptionEnableLiteral(true)) if err != nil { h.AppendError(err) } PrintToTSX(source, doc, TSXOptions{ IncludeScripts: false, IncludeStyles: false, }, transform.TransformOptions{ Filename: "AstroBenchmark", }, h) } } ================================================ FILE: internal/printer/printer.go ================================================ package printer import ( "fmt" "regexp" "strings" "unicode/utf8" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/helpers" "github.com/withastro/compiler/internal/js_scanner" "github.com/withastro/compiler/internal/loc" "github.com/withastro/compiler/internal/sourcemap" "github.com/withastro/compiler/internal/transform" "golang.org/x/net/html/atom" ) type PrintResult struct { Output []byte SourceMapChunk sourcemap.Chunk // Optional, used only for TSX output TSXRanges TSXRanges } type printer struct { sourcetext string opts transform.TransformOptions output []byte builder sourcemap.ChunkBuilder handler *handler.Handler hasFuncPrelude bool hasInternalImports bool hasCSSImports bool needsTransitionCSS bool // Optional, used only for TSX output ranges TSXRanges } var TEMPLATE_TAG = "$$render" var CREATE_ASTRO = "$$createAstro" var CREATE_COMPONENT = "$$createComponent" var RENDER_COMPONENT = "$$renderComponent" var RENDER_HEAD = "$$renderHead" var MAYBE_RENDER_HEAD = "$$maybeRenderHead" var UNESCAPE_HTML = "$$unescapeHTML" var RENDER_SLOT = "$$renderSlot" var MERGE_SLOTS = "$$mergeSlots" var ADD_ATTRIBUTE = "$$addAttribute" var RENDER_TRANSITION = "$$renderTransition" var CREATE_TRANSITION_SCOPE 
= "$$createTransitionScope"

var SPREAD_ATTRIBUTES = "$$spreadAttributes"
var DEFINE_STYLE_VARS = "$$defineStyleVars"
var DEFINE_SCRIPT_VARS = "$$defineScriptVars"
var CREATE_METADATA = "$$createMetadata"
var RENDER_SCRIPT = "$$renderScript"
var METADATA = "$$metadata"
var RESULT = "$$result"
var SLOTS = "$$slots"
var FRAGMENT = "Fragment"
var BACKTICK = "`"

// Matches module specifiers that end in a style-sheet extension.
var styleModuleSpecExp = regexp.MustCompile(`(\.css|\.pcss|\.postcss|\.sass|\.scss|\.styl|\.stylus|\.less)$`)

// print appends raw text to the output buffer.
func (p *printer) print(text string) {
	p.output = append(p.output, []byte(text)...)
}

// printf appends fmt.Sprintf-formatted text to the output buffer.
// Only call with a constant format string; use print for dynamic content.
func (p *printer) printf(format string, a ...interface{}) {
	p.print(fmt.Sprintf(format, a...))
}

// println appends text followed by a newline to the output buffer.
func (p *printer) println(text string) {
	p.print(text + "\n")
}

// setTSXFrontmatterRange records the output range occupied by the frontmatter.
func (p *printer) setTSXFrontmatterRange(frontmatterRange loc.TSXRange) {
	p.ranges.Frontmatter = frontmatterRange
}

// setTSXBodyRange records the output range occupied by the component body.
func (p *printer) setTSXBodyRange(componentRange loc.TSXRange) {
	p.ranges.Body = componentRange
}

// addTSXScript records an extracted script tag (its output range, content and
// classified type) for the TSX consumer.
func (p *printer) addTSXScript(start int, end int, content string, scriptType string) {
	p.ranges.Scripts = append(p.ranges.Scripts, TSXExtractedTag{
		Loc: loc.TSXRange{
			Start: start,
			End:   end,
		},
		Content: content,
		Type:    scriptType,
	})
}

// addTSXStyle records an extracted style tag (its output range, content,
// type and language) for the TSX consumer.
func (p *printer) addTSXStyle(start int, end int, content string, styleType string, styleLang string) {
	p.ranges.Styles = append(p.ranges.Styles, TSXExtractedTag{
		Loc: loc.TSXRange{
			Start: start,
			End:   end,
		},
		Content: content,
		Type:    styleType,
		Lang:    styleLang,
	})
}

func (p *printer) getAsyncFuncPrefix() string {
	// Decide whether to print `async` if top-level await is used. Use a loose check for now.
	funcPrefix := ""
	// NOTE(review): loose check — any occurrence of "await" anywhere in the
	// source (even in a string or comment) triggers the async prefix.
	if strings.Contains(p.sourcetext, "await") {
		funcPrefix = "async "
	}
	return funcPrefix
}

// printTextWithSourcemap prints text one rune at a time, adding a sourcemap
// entry for every rune starting at source offset l.Start. Offsets advance by
// each rune's UTF-8 byte size so mappings stay byte-accurate.
func (p *printer) printTextWithSourcemap(text string, l loc.Loc) {
	start := l.Start
	skipNext := false
	for pos, c := range text {
		if skipNext {
			skipNext = false
			continue
		}
		// If we encounter a CRLF, map both characters to the same location
		if c == '\r' && len(text[pos:]) > 1 && text[pos+1] == '\n' {
			p.addSourceMapping(loc.Loc{Start: start})
			p.print("\r\n")
			start += 2
			skipNext = true
			continue
		}
		_, nextCharByteSize := utf8.DecodeRuneInString(text[pos:])
		p.addSourceMapping(loc.Loc{Start: start})
		p.print(string(c))
		start += nextCharByteSize
	}
}

// printEscapedJSXTextWithSourcemap is printTextWithSourcemap for JSX text
// nodes: runes that are invalid as bare JSX text (`>` and `}`) are wrapped
// in a `{`...`}` expression so the emitted TSX stays parseable.
func (p *printer) printEscapedJSXTextWithSourcemap(text string, l loc.Loc) {
	start := l.Start
	skipNext := false
	for pos, c := range text {
		if skipNext {
			skipNext = false
			continue
		}
		// If we encounter a CRLF, map both characters to the same location
		if c == '\r' && len(text[pos:]) > 1 && text[pos+1] == '\n' {
			p.addSourceMapping(loc.Loc{Start: start})
			p.print("\r\n")
			start += 2
			skipNext = true
			continue
		}
		// If we encounter characters invalid in JSX, escape them by putting them in a JS expression
		// No need to map, since it's just text. We also don't need to handle tags, since this is only for text nodes.
		if c == '>' || c == '}' {
			p.print("{`")
			p.print(string(c))
			p.print("`}")
			start++
			continue
		}
		_, nextCharByteSize := utf8.DecodeRuneInString(text[pos:])
		p.addSourceMapping(loc.Loc{Start: start})
		p.print(string(c))
		start += nextCharByteSize
	}
}

// printInternalImports emits the `import { ... } from "<importSpecifier>"`
// statement that pulls in the compiler's runtime helpers. Emitted at most
// once per printer (guarded by hasInternalImports).
func (p *printer) printInternalImports(importSpecifier string, opts *RenderOptions) {
	if p.hasInternalImports {
		return
	}
	p.addNilSourceMapping()
	p.print("")
	p.print("import {\n ")
	p.addNilSourceMapping()
	p.print(FRAGMENT + ",\n ")
	p.addNilSourceMapping()
	p.print("render as " + TEMPLATE_TAG + ",\n ")
	p.addNilSourceMapping()
	p.print("createAstro as " + CREATE_ASTRO + ",\n ")
	p.addNilSourceMapping()
	p.print("createComponent as " + CREATE_COMPONENT + ",\n ")
	p.addNilSourceMapping()
	p.print("renderComponent as " + RENDER_COMPONENT + ",\n ")
	p.addNilSourceMapping()
	p.print("renderHead as " + RENDER_HEAD + ",\n ")
	p.addNilSourceMapping()
	p.print("maybeRenderHead as " + MAYBE_RENDER_HEAD + ",\n ")
	p.addNilSourceMapping()
	p.print("unescapeHTML as " + UNESCAPE_HTML + ",\n ")
	p.addNilSourceMapping()
	p.print("renderSlot as " + RENDER_SLOT + ",\n ")
	p.addNilSourceMapping()
	p.print("mergeSlots as " + MERGE_SLOTS + ",\n ")
	p.addNilSourceMapping()
	p.print("addAttribute as " + ADD_ATTRIBUTE + ",\n ")
	p.addNilSourceMapping()
	p.print("spreadAttributes as " + SPREAD_ATTRIBUTES + ",\n ")
	p.addNilSourceMapping()
	p.print("defineStyleVars as " + DEFINE_STYLE_VARS + ",\n ")
	p.addNilSourceMapping()
	p.print("defineScriptVars as " + DEFINE_SCRIPT_VARS + ",\n ")
	p.addNilSourceMapping()
	p.print("renderTransition as " + RENDER_TRANSITION + ",\n ")
	p.addNilSourceMapping()
	p.print("createTransitionScope as " + CREATE_TRANSITION_SCOPE + ",\n ")
	p.addNilSourceMapping()
	p.print("renderScript as " + RENDER_SCRIPT + ",\n ")
	// Only needed if using fallback `resolvePath` as it calls `$$metadata.resolvePath`
	if opts.opts.ResolvePath == nil {
		p.addNilSourceMapping()
		p.print("createMetadata as " + CREATE_METADATA)
	}
	p.addNilSourceMapping()
	p.print("\n} from \"")
p.print(importSpecifier) p.print("\";\n") p.addNilSourceMapping() p.hasInternalImports = true } func (p *printer) printCSSImports(cssLen int) { if p.hasCSSImports { return } i := 0 for i < cssLen { p.addNilSourceMapping() // import '/src/pages/index.astro?astro&type=style&index=0&lang.css'; p.print(fmt.Sprintf("import \"%s?astro&type=style&index=%v&lang.css\";", p.opts.Filename, i)) i++ } if p.needsTransitionCSS { p.addNilSourceMapping() p.print(fmt.Sprintf(`import "%s";`, p.opts.TransitionsAnimationURL)) } p.print("\n") p.hasCSSImports = true } func (p *printer) printRenderHead() { p.addNilSourceMapping() p.print(fmt.Sprintf("${%s(%s)}", RENDER_HEAD, RESULT)) } func (p *printer) printMaybeRenderHead() { p.addNilSourceMapping() p.print(fmt.Sprintf("${%s(%s)}", MAYBE_RENDER_HEAD, RESULT)) } func (p *printer) printReturnOpen() { p.addNilSourceMapping() p.print("return ") p.printTemplateLiteralOpen() } func (p *printer) printReturnClose() { p.addNilSourceMapping() p.printTemplateLiteralClose() p.println(";") } func (p *printer) printTemplateLiteralOpen() { p.addNilSourceMapping() p.print(fmt.Sprintf("%s%s", TEMPLATE_TAG, BACKTICK)) } func (p *printer) printTemplateLiteralClose() { p.addNilSourceMapping() p.print(BACKTICK) } func isTypeModuleScript(n *astro.Node) bool { t := astro.GetAttribute(n, "type") if t != nil && t.Val == "module" { return true } return false } func (p *printer) printDefineVarsOpen(n *astro.Node) { // Only handle <script> or <style> if !(n.DataAtom == atom.Script || n.DataAtom == atom.Style) { return } if !transform.HasAttr(n, "define:vars") { return } if n.DataAtom == atom.Script { if !isTypeModuleScript(n) { p.print("(function(){") } } for _, attr := range n.Attr { if attr.Key == "define:vars" { var value string var defineCall string if n.DataAtom == atom.Script { defineCall = DEFINE_SCRIPT_VARS } else if n.DataAtom == atom.Style { defineCall = DEFINE_STYLE_VARS } switch attr.Type { case astro.ExpressionAttribute: value = 
strings.TrimSpace(attr.Val) } p.addNilSourceMapping() p.print(fmt.Sprintf("${%s(", defineCall)) p.addSourceMapping(attr.ValLoc) p.printf(value) p.addNilSourceMapping() p.print(")}") return } } } func (p *printer) printDefineVarsClose(n *astro.Node) { // Only handle <script> if !(n.DataAtom == atom.Script) { return } if !transform.HasAttr(n, "define:vars") { return } if !isTypeModuleScript(n) { p.print("})();") } } func (p *printer) printFuncPrelude(opts transform.TransformOptions, printAstroGlobal bool) { if p.hasFuncPrelude { return } componentName := getComponentName(opts.Filename) p.addNilSourceMapping() p.println(fmt.Sprintf("const %s = %s(%s(%s, $$props, %s) => {", componentName, CREATE_COMPONENT, p.getAsyncFuncPrefix(), RESULT, SLOTS)) if printAstroGlobal { p.addNilSourceMapping() p.println(fmt.Sprintf("const Astro = %s.createAstro($$props, %s);", RESULT, SLOTS)) p.addNilSourceMapping() p.println(fmt.Sprintf("Astro.self = %s;", componentName)) } p.hasFuncPrelude = true } func (p *printer) printFuncSuffix(opts transform.TransformOptions, n *astro.Node) { componentName := getComponentName(opts.Filename) p.addNilSourceMapping() filenameArg := "undefined" propagationArg := "undefined" if len(opts.Filename) > 0 { escapedFilename := strings.ReplaceAll(opts.Filename, "'", "\\'") filenameArg = fmt.Sprintf("'%s'", escapedFilename) } if n.Transition { propagationArg = "'self'" } p.println(fmt.Sprintf("}, %s, %s);", filenameArg, propagationArg)) p.println(fmt.Sprintf("export default %s;", componentName)) } var skippedAttributes = map[string]bool{ "define:vars": true, "set:text": true, "set:html": true, "is:raw": true, "transition:animate": true, "transition:name": true, "transition:persist": true, } var skippedAttributesToObject = map[string]bool{ "set:text": true, "set:html": true, "is:raw": true, "transition:animate": true, "transition:name": true, "transition:persist": true, } func (p *printer) printAttributesToObject(n *astro.Node) { lastAttributeSkipped := false 
p.print("{") for i, a := range n.Attr { if i != 0 && !lastAttributeSkipped { p.print(",") } if _, ok := skippedAttributesToObject[a.Key]; ok { lastAttributeSkipped = true continue } if a.Namespace != "" { a.Key = fmt.Sprintf(`%s:%s`, a.Namespace, a.Key) } lastAttributeSkipped = false switch a.Type { case astro.QuotedAttribute: p.addSourceMapping(a.KeyLoc) p.printf(`"%s"`, a.Key) p.print(":") p.addSourceMapping(a.ValLoc) p.print(`"` + escapeDoubleQuote(escapeNewlines(a.Val)) + `"`) case astro.EmptyAttribute: p.addSourceMapping(a.KeyLoc) p.printf(`"%s"`, a.Key) p.print(":") p.print("true") case astro.ExpressionAttribute: p.addSourceMapping(a.KeyLoc) p.printf(`"%s"`, a.Key) p.print(":") p.addSourceMapping(a.ValLoc) if a.Val == "" { p.print(`(void 0)`) } else { p.print(`(` + a.Val + `)`) } case astro.SpreadAttribute: p.addSourceMapping(loc.Loc{Start: a.KeyLoc.Start - 3}) p.print(`...(` + strings.TrimSpace(a.Key) + `)`) case astro.ShorthandAttribute: withoutComments := helpers.RemoveComments(a.Key) if len(withoutComments) == 0 { lastAttributeSkipped = true continue } p.addSourceMapping(a.KeyLoc) p.print(`"` + withoutComments + `"`) p.print(":") p.addSourceMapping(a.KeyLoc) p.print(`(` + strings.TrimSpace(a.Key) + `)`) case astro.TemplateLiteralAttribute: p.addSourceMapping(a.KeyLoc) p.printf(`"%s"`, strings.TrimSpace(a.Key)) p.print(":") p.print("`" + strings.TrimSpace(a.Val) + "`") } } p.print("}") } func (p *printer) printAttribute(attr astro.Attribute, n *astro.Node) { if _, ok := skippedAttributes[attr.Key]; ok { return } if attr.Namespace != "" || attr.Type == astro.QuotedAttribute || attr.Type == astro.EmptyAttribute { p.print(" ") } if attr.Namespace != "" { attr.Key = fmt.Sprintf("%s:%s", attr.Namespace, attr.Key) } switch attr.Type { case astro.QuotedAttribute: p.addSourceMapping(attr.KeyLoc) p.print(attr.Key) p.addNilSourceMapping() p.print(`="`) p.printTextWithSourcemap(encodeDoubleQuote(escapeInterpolation(escapeBackticks(attr.Val))), attr.ValLoc) 
		p.addNilSourceMapping()
		p.print(`"`)
	case astro.EmptyAttribute:
		p.addSourceMapping(attr.KeyLoc)
		p.print(attr.Key)
	case astro.ExpressionAttribute:
		// foo={expr} -> ${$$addAttribute(expr, "foo")}; an empty/whitespace
		// expression renders as (void 0).
		p.addNilSourceMapping()
		p.print(fmt.Sprintf("${%s(", ADD_ATTRIBUTE))
		if strings.TrimSpace(attr.Val) == "" {
			p.addNilSourceMapping()
			p.print("(void 0)")
		} else {
			p.printTextWithSourcemap(attr.Val, attr.ValLoc)
		}
		p.addNilSourceMapping()
		p.print(`, "`)
		p.addSourceMapping(attr.KeyLoc)
		p.print(attr.Key)
		p.addNilSourceMapping()
		p.print(`")}`)
	case astro.SpreadAttribute:
		// When the document root carries scoped styles, the scope class must
		// be merged into the spread unless the element already has an
		// explicit class / class:list attribute.
		// (Note: the loop variable p below shadows the printer receiver.)
		injectClass := false
		for p := n.Parent; p != nil; p = p.Parent {
			if p.Parent == nil && len(p.Styles) != 0 {
				injectClass = true
				break
			}
		}
		if injectClass {
			for _, a := range n.Attr {
				if a.Key == "class" || a.Key == "class:list" {
					injectClass = false
					break
				}
			}
		}
		p.print(fmt.Sprintf("${%s(", SPREAD_ATTRIBUTES))
		// Shift the mapping back 3 chars to point at the "..." in the source.
		p.addSourceMapping(loc.Loc{Start: attr.KeyLoc.Start - 3})
		p.print(strings.TrimSpace(attr.Key))
		if !injectClass {
			p.print(`)}`)
		} else {
			p.printf(`,undefined,{"class":"astro-%s"})}`, p.opts.Scope)
		}
	case astro.ShorthandAttribute:
		// {foo} -> ${$$addAttribute(foo, "foo")}; braces holding only
		// comments produce no output at all.
		withoutComments := helpers.RemoveComments(attr.Key)
		if len(withoutComments) == 0 {
			return
		}
		p.print(fmt.Sprintf("${%s(", ADD_ATTRIBUTE))
		p.addSourceMapping(attr.KeyLoc)
		p.print(strings.TrimSpace(attr.Key))
		p.addSourceMapping(attr.KeyLoc)
		p.print(`, "` + withoutComments + `")}`)
	case astro.TemplateLiteralAttribute:
		// foo=`bar` -> ${$$addAttribute(`bar`, "foo")}
		p.print(fmt.Sprintf("${%s(`", ADD_ATTRIBUTE))
		p.addSourceMapping(attr.ValLoc)
		p.print(strings.TrimSpace(attr.Val))
		p.addSourceMapping(attr.KeyLoc)
		p.print("`" + `, "` + strings.TrimSpace(attr.Key) + `")}`)
	}
}

// addSourceMapping records a mapping from the given source location to the
// current output position; negative locations are clamped to offset 0.
func (p *printer) addSourceMapping(location loc.Loc) {
	if location.Start < 0 {
		p.builder.AddSourceMapping(loc.Loc{Start: 0}, p.output)
	} else {
		p.builder.AddSourceMapping(location, p.output)
	}
}

// Reset sourcemap by pointing to last possible index
func (p *printer) addNilSourceMapping() {
	p.builder.AddSourceMapping(loc.Loc{Start: -1}, p.output)
}

// printTopLevelAstro emits the module-level Astro global:
//
//	const $$Astro = $$createAstro(<args>);
//	const Astro = $$Astro;
func (p *printer) printTopLevelAstro(opts transform.TransformOptions) {
p.println(fmt.Sprintf("const $$Astro = %s(%s);\nconst Astro = $$Astro;", CREATE_ASTRO, opts.AstroGlobalArgs)) } func remove(slice []*astro.Node, node *astro.Node) []*astro.Node { var s int for i, n := range slice { if n == node { s = i } } return append(slice[:s], slice[s+1:]...) } func maybeConvertTransition(n *astro.Node) { if transform.HasAttr(n, transform.TRANSITION_ANIMATE) || transform.HasAttr(n, transform.TRANSITION_NAME) { animationExpr := convertAttributeValue(n, transform.TRANSITION_ANIMATE) transitionExpr := convertAttributeValue(n, transform.TRANSITION_NAME) n.Attr = append(n.Attr, astro.Attribute{ Key: "data-astro-transition-scope", Val: fmt.Sprintf(`%s(%s, "%s", %s, %s)`, RENDER_TRANSITION, RESULT, n.TransitionScope, animationExpr, transitionExpr), Type: astro.ExpressionAttribute, }) } if transform.HasAttr(n, transform.TRANSITION_PERSIST) { transitionPersistIndex := transform.AttrIndex(n, transform.TRANSITION_PERSIST) // If there no value, create a transition scope for this element if n.Attr[transitionPersistIndex].Val != "" { // Just rename the attribute n.Attr[transitionPersistIndex].Key = "data-astro-transition-persist" } else if transform.HasAttr(n, transform.TRANSITION_NAME) { transitionNameAttr := transform.GetAttr(n, transform.TRANSITION_NAME) n.Attr[transitionPersistIndex].Key = "data-astro-transition-persist" n.Attr[transitionPersistIndex].Val = transitionNameAttr.Val n.Attr[transitionPersistIndex].Type = transitionNameAttr.Type } else { n.Attr = append(n.Attr, astro.Attribute{ Key: "data-astro-transition-persist", Val: fmt.Sprintf(`%s(%s, "%s")`, CREATE_TRANSITION_SCOPE, RESULT, n.TransitionScope), Type: astro.ExpressionAttribute, }) } // Do a simple rename for `transition:persist-props` transitionPersistPropsIndex := transform.AttrIndex(n, transform.TRANSITION_PERSIST_PROPS) if transitionPersistPropsIndex != -1 { n.Attr[transitionPersistPropsIndex].Key = "data-astro-transition-persist-props" } } } func (p *printer) 
printComponentMetadata(doc *astro.Node, opts transform.TransformOptions, source []byte) { var specs []string var asrts []string var conlyspecs []string unfoundconly := make([]*astro.Node, len(doc.ClientOnlyComponentNodes)) copy(unfoundconly, doc.ClientOnlyComponentNodes) modCount := 1 l, statement := js_scanner.NextImportStatement(source, 0) for l != -1 { isClientOnlyImport := false component_loop: for _, n := range doc.ClientOnlyComponentNodes { for _, imported := range statement.Imports { exportName, isUsed := js_scanner.ExtractComponentExportName(n.Data, imported) if isUsed { attrTemplate := `"%s"` if opts.ResolvePath == nil { attrTemplate = `$$metadata.resolvePath("%s")` } // Inject metadata attributes to `client:only` Component pathAttr := astro.Attribute{ Key: "client:component-path", Val: fmt.Sprintf(attrTemplate, transform.ResolveIdForMatch(statement.Specifier, &opts)), Type: astro.ExpressionAttribute, } n.Attr = append(n.Attr, pathAttr) conlyspecs = append(conlyspecs, statement.Specifier) exportAttr := astro.Attribute{ Key: "client:component-export", Val: exportName, Type: astro.QuotedAttribute, } n.Attr = append(n.Attr, exportAttr) unfoundconly = remove(unfoundconly, n) isClientOnlyImport = true continue component_loop } } if isClientOnlyImport { continue component_loop } } if !isClientOnlyImport && opts.ResolvePath == nil { assertions := "" if statement.Assertions != "" { assertions += " assert " assertions += statement.Assertions } isCSSImport := false if len(statement.Imports) == 0 && styleModuleSpecExp.MatchString(statement.Specifier) { isCSSImport = true } if !isCSSImport && !statement.IsType { p.print(fmt.Sprintf("\nimport * as $$module%v from '%s'%s;", modCount, statement.Specifier, assertions)) specs = append(specs, statement.Specifier) asrts = append(asrts, statement.Assertions) modCount++ } } l, statement = js_scanner.NextImportStatement(source, l) } if len(unfoundconly) > 0 { for _, n := range unfoundconly { 
			p.handler.AppendError(&loc.ErrorWithRange{
				Code:  loc.ERROR_FRAGMENT_SHORTHAND_ATTRS,
				Text:  "Unable to find matching import statement for client:only component",
				Hint:  "A client:only component must match an import statement, either the default export or a named exported, and can't be derived from a variable in the frontmatter.",
				Range: loc.Range{Loc: n.Loc[0], Len: len(n.Data)},
			})
		}
	}
	// If we added imports, add a line break.
	if modCount > 1 {
		p.print("\n")
	}
	// Only needed if using fallback `resolvePath` as it calls `$$metadata.resolvePath`
	if opts.ResolvePath != nil {
		return
	}
	// Call createMetadata
	patharg := opts.Filename
	if patharg == "" {
		patharg = "import.meta.url"
	} else {
		escapedPatharg := strings.ReplaceAll(patharg, "'", "\\'")
		patharg = fmt.Sprintf("\"%s\"", escapedPatharg)
	}
	p.print(fmt.Sprintf("\nexport const $$metadata = %s(%s, { ", CREATE_METADATA, patharg))
	// Add modules: one { module, specifier, assert } record per $$moduleN
	// import emitted above (modCount is 1-based).
	p.print("modules: [")
	for i := 1; i < modCount; i++ {
		if i > 1 {
			p.print(", ")
		}
		asrt := "{}"
		if asrts[i-1] != "" {
			asrt = asrts[i-1]
		}
		p.print(fmt.Sprintf("{ module: $$module%v, specifier: '%s', assert: %s }", i, specs[i-1], asrt))
	}
	p.print("]")
	// Hydrated Components: custom elements by tag name (string), components
	// by identifier.
	p.print(", hydratedComponents: [")
	for i, node := range doc.HydratedComponentNodes {
		if i > 0 {
			p.print(", ")
		}
		if node.CustomElement {
			p.print(fmt.Sprintf("'%s'", node.Data))
		} else {
			p.print(node.Data)
		}
	}
	// Client-Only Components: specifiers, deduplicated in first-seen order.
	p.print("], clientOnlyComponents: [")
	uniquespecs := make([]string, 0)
	i := 0
conly_loop:
	for _, spec := range conlyspecs {
		for _, uniq := range uniquespecs {
			if uniq == spec {
				continue conly_loop
			}
		}
		if i > 0 {
			p.print(", ")
		}
		p.print(fmt.Sprintf("'%s'", spec))
		i++
		uniquespecs = append(uniquespecs, spec)
	}
	// Hydration directives (map iteration order is unspecified; emitted as a Set).
	p.print("], hydrationDirectives: new Set([")
	j := 0
	for directive := range doc.HydrationDirectives {
		if j > 0 {
			p.print(", ")
		}
		p.print(fmt.Sprintf("'%s'", directive))
		j++
	}
	// Hoisted scripts
	p.print("]), hoisted: [")
	for i, node := range doc.Scripts {
		if i > 0 {
			p.print(", ")
		}
defineVars := astro.GetAttribute(node, "define:vars") src := astro.GetAttribute(node, "src") switch { case defineVars != nil: keys := js_scanner.GetObjectKeys([]byte(defineVars.Val)) params := make([]byte, 0) for i, key := range keys { params = append(params, key...) if i < len(keys)-1 { params = append(params, ',') } } p.print(fmt.Sprintf("{ type: 'define:vars', value: `%s`, keys: '%s' }", escapeInterpolation(escapeBackticks(node.FirstChild.Data)), escapeSingleQuote(string(params)))) case src != nil: p.print(fmt.Sprintf("{ type: 'external', src: '%s' }", escapeSingleQuote(src.Val))) case node.FirstChild != nil: p.print(fmt.Sprintf("{ type: 'inline', value: `%s` }", escapeInterpolation(escapeBackticks(node.FirstChild.Data)))) } } p.print("] });\n\n") } ================================================ FILE: internal/printer/printer_css_test.go ================================================ package printer import ( "strings" "testing" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/test_utils" "github.com/withastro/compiler/internal/transform" ) type testcase_css struct { name string source string scopedStyleStrategy string } func TestPrinterCSS(t *testing.T) { tests := []testcase_css{ { name: "styles (no frontmatter)", source: `<style> .title { font-family: fantasy; font-size: 28px; } .body { font-size: 1em; } </style> <h1 class="title">Page Title</h1> <p class="body">I’m a page</p>`, }, { name: "scopedStyleStrategy: 'class'", source: `<style> .title { font-family: fantasy; font-size: 28px; } .body { font-size: 1em; } </style> <h1 class="title">Page Title</h1> <p class="body">I’m a page</p>`, scopedStyleStrategy: "class", }, { name: "scopedStyleStrategy: 'attribute'", source: `<style> .title { font-family: fantasy; font-size: 28px; } .body { font-size: 1em; } </style> <h1 class="title">Page Title</h1> <p class="body">I’m a page</p>`, scopedStyleStrategy: "attribute", }, } for 
_, tt := range tests { t.Run(tt.name, func(t *testing.T) { // transform output from source code := test_utils.Dedent(tt.source) doc, err := astro.Parse(strings.NewReader(code)) if err != nil { t.Error(err) } scopedStyleStrategy := "where" if tt.scopedStyleStrategy == "class" || tt.scopedStyleStrategy == "attribute" { scopedStyleStrategy = tt.scopedStyleStrategy } hash := astro.HashString(code) opts := transform.TransformOptions{Scope: hash, ScopedStyleStrategy: scopedStyleStrategy} transform.ExtractStyles(doc, &opts) transform.Transform(doc, opts, handler.NewHandler(code, "/test.astro")) // note: we want to test Transform in context here, but more advanced cases could be tested separately result := PrintCSS(code, doc, transform.TransformOptions{ Scope: "astro-XXXX", InternalURL: "http://localhost:3000/", }) output := "" for _, bytes := range result.Output { output += string(bytes) } test_utils.MakeSnapshot( &test_utils.SnapshotOptions{ Testing: t, TestCaseName: tt.name, Input: code, Output: output, Kind: test_utils.CssOutput, FolderName: "__printer_css__", }) }) } } ================================================ FILE: internal/printer/printer_test.go ================================================ package printer import ( "fmt" "strings" "testing" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" types "github.com/withastro/compiler/internal/t" "github.com/withastro/compiler/internal/test_utils" "github.com/withastro/compiler/internal/transform" ) var INTERNAL_IMPORTS = fmt.Sprintf("import {\n %s\n} from \"%s\";\n", strings.Join([]string{ FRAGMENT, "render as " + TEMPLATE_TAG, "createAstro as " + CREATE_ASTRO, "createComponent as " + CREATE_COMPONENT, "renderComponent as " + RENDER_COMPONENT, "renderHead as " + RENDER_HEAD, "maybeRenderHead as " + MAYBE_RENDER_HEAD, "unescapeHTML as " + UNESCAPE_HTML, "renderSlot as " + RENDER_SLOT, "mergeSlots as " + MERGE_SLOTS, "addAttribute as " + ADD_ATTRIBUTE, "spreadAttributes 
as " + SPREAD_ATTRIBUTES, "defineStyleVars as " + DEFINE_STYLE_VARS, "defineScriptVars as " + DEFINE_SCRIPT_VARS, "renderTransition as " + RENDER_TRANSITION, "createTransitionScope as " + CREATE_TRANSITION_SCOPE, "renderScript as " + RENDER_SCRIPT, "createMetadata as " + CREATE_METADATA, }, ",\n "), "http://localhost:3000/") var PRELUDE = fmt.Sprintf(`const $$Component = %s(($$result, $$props, %s) => {`, CREATE_COMPONENT, SLOTS) var PRELUDE_WITH_ASYNC = fmt.Sprintf(`const $$Component = %s(async ($$result, $$props, %s) => {`, CREATE_COMPONENT, SLOTS) var PRELUDE_ASTRO_GLOBAL = fmt.Sprintf(`const Astro = $$result.createAstro($$props, %s); Astro.self = $$Component;`, SLOTS) var RETURN = fmt.Sprintf("return %s%s", TEMPLATE_TAG, BACKTICK) var SUFFIX = fmt.Sprintf("%s;", BACKTICK) + ` }, undefined, undefined); export default $$Component;` var SUFFIX_EXP_TRANSITIONS = fmt.Sprintf("%s;", BACKTICK) + ` }, undefined, 'self'); export default $$Component;` var CREATE_ASTRO_CALL = "const $$Astro = $$createAstro('https://astro.build');\nconst Astro = $$Astro;" var RENDER_HEAD_RESULT = "${$$renderHead($$result)}" type testcase struct { name string source string only bool transitions bool filename string } type jsonTestcase struct { name string source string only bool } func TestPrinter(t *testing.T) { longRandomString := "" for i := 0; i < 40; i++ { longRandomString += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()-_=+[];:'\",.?" } tests := []testcase{ { name: "text only", source: `Foo`, }, { name: "unusual line terminator I", source: `Pre-set & Time-limited \u2028holiday campaigns`, }, { name: "unusual line terminator II", source: `Pre-set & Time-limited 
holiday campaigns`, }, { name: "basic (no frontmatter)", source: `<button>Click</button>`, }, { name: "basic renderHead", source: `<html><head><title>Ah</title></head></html>`, }, { name: "head inside slot", source: `<html><slot><head></head></slot></html>`, }, { name: "head slot", source: `<html><head><slot /></html>`, }, { name: "head slot II", source: `<html><head><slot /></head><body class="a"></body></html>`, }, { name: "head slot III", source: `<html><head><slot name="baseHeadExtension"><meta property="test2" content="test2"/></slot></head>`, }, { name: "ternary component", source: `{special ? <ChildDiv><p>Special</p></ChildDiv> : <p>Not special</p>}`, }, { name: "ternary layout", source: `{toggleError ? <BaseLayout><h1>SITE: {Astro.site}</h1></BaseLayout> : <><h1>SITE: {Astro.site}</h1></>}`, }, { name: "orphan slot", source: `<slot />`, }, { name: "conditional slot", source: `<Component>{value && <div slot="test">foo</div>}</Component>`, }, { name: "ternary slot", source: `<Component>{Math.random() > 0.5 ? 
<div slot="a">A</div> : <div slot="b">B</div>}</Component>`, }, { name: "function expression slots I", source: "<Component>\n{() => { switch (value) {\ncase 'a': return <div slot=\"a\">A</div>\ncase 'b': return <div slot=\"b\">B</div>\ncase 'c': return <div slot=\"c\">C</div>\n}\n}}\n</Component>", }, { name: "function expression slots II (#959)", source: `<Layout title="Welcome to Astro."> <main> <Layout title="switch bug"> {components.map((component, i) => { switch(component) { case "Hero": return <div>Hero</div> case "Component2": return <div>Component2</div> } })} </Layout> </main> </Layout>`, }, { name: "expression slot", source: `<Component>{true && <div slot="a">A</div>}{false && <div slot="b">B</div>}</Component>`, }, { name: "preserve is:inline slot", source: `<slot is:inline />`, }, { name: "preserve is:inline slot II", source: `<slot name="test" is:inline />`, }, { name: "slot with fallback", source: `<body><slot><p>Hello world!</p></slot><body>`, }, { name: "slot with fallback II", source: `<slot name="test"><p>Hello world!</p></slot>`, }, { name: "slot with fallback III", source: `<div><slot name="test"><p>Fallback</p></slot></div>`, }, { name: "Preserve slot whitespace", source: `<Component> <p>Paragraph 1</p> <p>Paragraph 2</p> </Component>`, }, { name: "text only", source: "Hello!", }, { name: "custom-element", source: "{show && <client-only-element></client-only-element>}", }, { name: "attribute with template literal", source: "<a :href=\"`/home`\">Home</a>", }, { name: "attribute with template literal interpolation", source: "<a :href=\"`/${url}`\">Home</a>", }, { name: "basic (frontmatter)", source: `--- const href = '/about'; --- <a href={href}>About</a>`, }, { name: "getStaticPaths (basic)", source: `--- export const getStaticPaths = async () => { return { paths: [] } } --- <div></div>`, }, { name: "getStaticPaths (hoisted)", source: `--- const a = 0; export const getStaticPaths = async () => { return { paths: [] } } --- <div></div>`, }, { 
name: "getStaticPaths (hoisted II)", source: `--- const a = 0; export async function getStaticPaths() { return { paths: [] } } const b = 0; --- <div></div>`, }, { name: "export member does not panic", source: `--- mod.export(); --- <div />`, }, { name: "export comments I", source: `--- // hmm export const foo = 0 /* */ ---`, }, { name: "export comments II", source: `--- // hmm export const foo = 0; /* */ ---`, }, { name: "import assertions", source: `--- import data from "test" assert { type: 'json' }; --- `, }, { name: "import to identifier named assert", source: `--- import assert from 'test'; ---`, }, { name: "no expressions in math", source: `<p>Hello, world! This is a <em>buggy</em> formula: <span class="math math-inline"><span class="katex"><span class="katex-mathml"><math xmlns="http://www.w3.org/1998/Math/MathML"><semantics><mrow><mi>f</mi><mspace></mspace><mspace width="0.1111em"></mspace><mo lspace="0em" rspace="0.17em"></mo><mtext> ⁣</mtext><mo lspace="0em" rspace="0em">:</mo><mspace width="0.3333em"></mspace><mi>X</mi><mo>→</mo><msup><mi mathvariant="double-struck">R</mi><mrow><mn>2</mn><mi>x</mi></mrow></msup></mrow><annotation encoding="application/x-tex">f\colon X \to \mathbb R^{2x}</annotation></semantics></math></span><span class="katex-html" aria-hidden="true"><span class="base"><span class="strut" style="height:0.8889em;vertical-align:-0.1944em;"></span><span class="mord mathnormal" style="margin-right:0.10764em;">f</span><span class="mspace nobreak"></span><span class="mspace" style="margin-right:0.1111em;"></span><span class="mpunct"></span><span class="mspace" style="margin-right:-0.1667em;"></span><span class="mspace" style="margin-right:0.1667em;"></span><span class="mord"><span class="mrel">:</span></span><span class="mspace" style="margin-right:0.3333em;"></span><span class="mord mathnormal" style="margin-right:0.07847em;">X</span><span class="mspace" style="margin-right:0.2778em;"></span><span class="mrel">→</span><span class="mspace" 
style="margin-right:0.2778em;"></span></span><span class="base"><span class="strut" style="height:0.8141em;"></span><span class="mord"><span class="mord mathbb">R</span><span class="msupsub"><span class="vlist-t"><span class="vlist-r"><span class="vlist" style="height:0.8141em;"><span style="top:-3.063em;margin-right:0.05em;"><span class="pstrut" style="height:2.7em;"></span><span class="sizing reset-size6 size3 mtight"><span class="mord mtight"><span class="mord mtight">2</span><span class="mord mathnormal mtight">x</span></span></span></span></span></span></span></span></span></span></span></span></span></p>`, }, { name: "import order", source: `--- let testWord = "Test" // comment import data from "test"; --- <div>{data}</div> `, }, { name: "type import", source: `--- import type data from "test" --- <div>{data}</div> `, }, { name: "no expressions in math", source: `<p>Hello, world! This is a <em>buggy</em> formula: <span class="math math-inline"><span class="katex"><span class="katex-mathml"><math xmlns="http://www.w3.org/1998/Math/MathML"><semantics><mrow><mi>f</mi><mspace></mspace><mspace width="0.1111em"></mspace><mo lspace="0em" rspace="0.17em"></mo><mtext> ⁣</mtext><mo lspace="0em" rspace="0em">:</mo><mspace width="0.3333em"></mspace><mi>X</mi><mo>→</mo><msup><mi mathvariant="double-struck">R</mi><mrow><mn>2</mn><mi>x</mi></mrow></msup></mrow><annotation encoding="application/x-tex">f\colon X \to \mathbb R^{2x}</annotation></semantics></math></span><span class="katex-html" aria-hidden="true"><span class="base"><span class="strut" style="height:0.8889em;vertical-align:-0.1944em;"></span><span class="mord mathnormal" style="margin-right:0.10764em;">f</span><span class="mspace nobreak"></span><span class="mspace" style="margin-right:0.1111em;"></span><span class="mpunct"></span><span class="mspace" style="margin-right:-0.1667em;"></span><span class="mspace" style="margin-right:0.1667em;"></span><span class="mord"><span class="mrel">:</span></span><span 
class="mspace" style="margin-right:0.3333em;"></span><span class="mord mathnormal" style="margin-right:0.07847em;">X</span><span class="mspace" style="margin-right:0.2778em;"></span><span class="mrel">→</span><span class="mspace" style="margin-right:0.2778em;"></span></span><span class="base"><span class="strut" style="height:0.8141em;"></span><span class="mord"><span class="mord mathbb">R</span><span class="msupsub"><span class="vlist-t"><span class="vlist-r"><span class="vlist" style="height:0.8141em;"><span style="top:-3.063em;margin-right:0.05em;"><span class="pstrut" style="height:2.7em;"></span><span class="sizing reset-size6 size3 mtight"><span class="mord mtight"><span class="mord mtight">2</span><span class="mord mathnormal mtight">x</span></span></span></span></span></span></span></span></span></span></span></span></span></p>`, }, { name: "css imports are not included in module metadata", source: `--- import './styles.css'; --- `, }, { name: "solidus in template literal expression", source: "<div value={`${attr ? `a/b` : \"c\"} awesome`} />", }, { name: "nested template literal expression", source: "<div value={`${attr ? 
`a/b ${`c`}` : \"d\"} awesome`} />", }, { name: "component in expression with its child expression before its child element", source: "{list.map(() => (<Component>{name}<link rel=\"stylesheet\" /></Component>))}", }, { name: "expression returning multiple elements", source: `<Layout title="Welcome to Astro."> <main> <h1>Welcome to <span class="text-gradient">Astro</span></h1> { Object.entries(DUMMY_DATA).map(([dummyKey, dummyValue]) => { return ( <p> onlyp {dummyKey} </p> <h2> onlyh2 {dummyKey} </h2> <div> <h2>div+h2 {dummyKey}</h2> </div> <p> <h2>p+h2 {dummyKey}</h2> </p> ); }) } </main> </Layout>`, }, { name: "nested template literal expression", source: `<html lang="en"> <body> {Object.keys(importedAuthors).map(author => <p><div>hello</div></p>)} {Object.keys(importedAuthors).map(author => <p><div>{author}</div></p>)} </body> </html>`, }, { name: "complex nested template literal expression", source: "<div value={`${attr ? `a/b ${`c ${`d ${cool}`}`}` : \"d\"} ahhhh`} />", }, { name: "component", source: `--- import VueComponent from '../components/Vue.vue'; --- <html> <head> <title>Hello world</title> </head> <body> <VueComponent /> </body> </html>`, }, { name: "dot component", source: `--- import * as ns from '../components'; --- <html> <head> <title>Hello world</title> </head> <body> <ns.Component /> </body> </html>`, }, { name: "component with quoted attributes", source: `<Component is='"cool"' />`, }, { name: "slot with quoted attributes", source: `<Component><div slot='"name"' /></Component>`, }, { name: "#955 ternary slot with text", source: `<Component>Hello{isLeaf ? <p>Leaf</p> : <p>Branch</p>}world</Component>`, }, { name: "#955 ternary slot with elements", source: `<Component><div>{isLeaf ? 
<p>Leaf</p> : <p>Branch</p>}</div></Component>`, }, { name: "noscript component", source: ` <html> <head></head> <body> <noscript> <Component /> </noscript> </body> </html>`, }, { name: "noscript styles", source: `<noscript><style>div { color: red; }</style></noscript>`, }, { name: "noscript deep styles", source: `<body><noscript><div><div><div><style>div { color: red; }</style></div></div></div></noscript></body>`, }, { name: "noscript only", source: `<noscript><h1>Hello world</h1></noscript>`, }, { name: "client:only component (default)", source: `--- import Component from '../components'; --- <html> <head> <title>Hello world</title> </head> <body> <Component client:only /> </body> </html>`, }, { name: "client:only component (named)", source: `--- import { Component } from '../components'; --- <html> <head> <title>Hello world</title> </head> <body> <Component client:only /> </body> </html>`, }, { name: "client:only component (namespace)", source: `--- import * as components from '../components'; --- <html> <head> <title>Hello world</title> </head> <body> <components.A client:only /> </body> </html>`, }, { name: "client:only component (namespaced default)", source: `--- import defaultImport from '../components/ui-1'; --- <html> <head> <title>Hello world</title> </head> <body> <defaultImport.Counter1 client:only /> </body> </html>`, }, { name: "client:only component (namespaced named)", source: `--- import { namedImport } from '../components/ui-2'; --- <html> <head> <title>Hello world</title> </head> <body> <namedImport.Counter2 client:only /> </body> </html>`, }, { name: "client:only component (multiple)", source: `--- import Component from '../components'; --- <html> <head> <title>Hello world</title> </head> <body> <Component test="a" client:only /> <Component test="b" client:only /> <Component test="c" client:only /> </body> </html>`, }, { name: "iframe", source: `<iframe src="something" />`, }, { name: "conditional render", source: `<body>{false ? 
<div>#f</div> : <div>#t</div>}</body>`, }, { name: "conditional noscript", source: `{mode === "production" && <noscript>Hello</noscript>}`, }, { name: "conditional iframe", source: `{bool && <iframe src="something">content</iframe>}`, }, { name: "simple ternary", source: `<body>{link ? <a href="/">{link}</a> : <div>no link</div>}</body>`, }, { name: "map basic", source: `--- const items = [0, 1, 2]; --- <ul> {items.map(item => { return <li>{item}</li>; })} </ul>`, }, { name: "map without component", source: `<header><nav>{menu.map((item) => <a href={item.href}>{item.title}</a>)}</nav></header>`, }, { name: "map with component", source: `<header><nav>{menu.map((item) => <a href={item.href}>{item.title}</a>)}</nav><Hello/></header>`, }, { name: "map nested", source: `--- const groups = [[0, 1, 2], [3, 4, 5]]; --- <div> {groups.map(items => { return <ul>{ items.map(item => { return <li>{item}</li>; }) }</ul> })} </div>`, }, { name: "backtick in HTML comment", source: "<body><!-- `npm install astro` --></body>", }, { name: "HTML comment in component inside expression I", source: "{(() => <Component><!--Hi--></Component>)}", }, { name: "HTML comment in component inside expression II", source: "{list.map(() => <Component><!--Hi--></Component>)}", }, { name: "nested expressions", source: `<article>{(previous || next) && <aside>{previous && <div>Previous Article: <a rel="prev" href={new URL(previous.link, Astro.site).pathname}>{previous.text}</a></div>}{next && <div>Next Article: <a rel="next" href={new URL(next.link, Astro.site).pathname}>{next.text}</a></div>}</aside>}</article>`, }, { name: "nested expressions II", source: `<article>{(previous || next) && <aside>{previous && <div>Previous Article: <a rel="prev" href={new URL(previous.link, Astro.site).pathname}>{previous.text}</a></div>} {next && <div>Next Article: <a rel="next" href={new URL(next.link, Astro.site).pathname}>{next.text}</a></div>}</aside>}</article>`, }, { name: "nested expressions III", source: 
`<div>{x.map((x) => x ? <div>{true ? <span>{x}</span> : null}</div> : <div>{false ? null : <span>{x}</span>}</div>)}</div>`, }, { name: "nested expressions IV", source: `<div>{() => { if (value > 0.25) { return <span>Default</span> } else if (value > 0.5) { return <span>Another</span> } else if (value > 0.75) { return <span>Other</span> } return <span>Yet Other</span> }}</div>`, }, { name: "nested expressions V", source: `<div><h1>title</h1>{list.map(group => <Fragment><h2>{group.label}</h2>{group.items.map(item => <span>{item}</span>)}</Fragment>)}</div>`, }, { name: "nested expressions VI", source: `<div>{()=>{ if (true) { return <hr />;} if (true) { return <img />;}}}</div>`, }, { name: "nested expressions VII", source: `<div>{() => { if (value > 0.25) { return <br />;} else if (value > 0.5) { return <hr />;} else if (value > 0.75) { return <div />;} return <div>Yaaay</div>;}</div>`, }, { name: "nested expressions VIII", source: `<div>{ items.map(({ type, ...data }) => { switch (type) { case 'card': { return ( <Card {...data} /> ); } case 'paragraph': { return ( <p>{data.body}</p>);}}})}</div>`, }, { name: "expressions with JS comments", source: `--- const items = ['red', 'yellow', 'blue']; --- <div> {items.map((item) => ( // foo < > < } <div id={color}>color</div> ))} {items.map((item) => ( /* foo < > < } */ <div id={color}>color</div> ))} </div>`, }, { name: "expressions with multiple curly braces", source: ` <div> { () => { let generate = (input) => { let a = () => { return; }; let b = () => { return; }; let c = () => { return; }; }; } } </div>`, }, { name: "slots (basic)", source: `--- import Component from "test"; --- <Component> <div>Default</div> <div slot="named">Named</div> </Component>`, }, { name: "slots (no comments)", source: `--- import Component from 'test'; --- <Component> <div>Default</div> <!-- A comment! 
--> <div slot="named">Named</div> </Component>`, }, { name: "slots (expression)", source: ` <Component {data}> {items.map(item => <div>{item}</div>)} </Component>`, }, { name: "head expression", source: `--- const name = "world"; --- <html> <head> <title>Hello {name}</title> </head> <body> <div></div> </body> </html>`, }, { name: "head expression and conditional rendering of fragment", source: `--- const testBool = true; --- <html> <head> <meta charset="UTF-8" /> <title>{testBool ? "Hey" : "Bye"}</title> {testBool && (<><meta name="description" content="test" /></>)} </head> <body> <div></div> </body> </html>`, }, { name: "conditional rendering of title containing expression", source: `{ props.title && ( <> <title>{props.title}</title> <meta property="og:title" content={props.title} /> <meta name="twitter:title" content={props.title} /> </> ) }`, }, { name: "styles (no frontmatter)", source: `<style> .title { font-family: fantasy; font-size: 28px; } .body { font-size: 1em; } </style> <h1 class="title">Page Title</h1> <p class="body">I’m a page</p>`, }, { name: "html5 boilerplate", source: `<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>A Basic HTML5 Template</title> <meta name="description" content="A simple HTML5 Template for new projects."> <meta name="author" content="SitePoint"> <meta property="og:title" content="A Basic HTML5 Template"> <meta property="og:type" content="website"> <meta property="og:url" content="https://www.sitepoint.com/a-basic-html5-template/"> <meta property="og:description" content="A simple HTML5 Template for new projects."> <meta property="og:image" content="image.png"> <link rel="icon" href="/favicon.ico"> <link rel="icon" href="/favicon.svg" type="image/svg+xml"> <link rel="apple-touch-icon" href="/apple-touch-icon.png"> <link rel="stylesheet" href="css/styles.css?v=1.0"> </head> <body> <!-- your content here... 
--> <script is:inline src="js/scripts.js"></script> </body> </html>`, }, { name: "React framework example", source: `--- // Component Imports import Counter from '../components/Counter.jsx' const someProps = { count: 0, } // Full Astro Component Syntax: // https://docs.astro.build/core-concepts/astro-components/ --- <html lang="en"> <head> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width" /> <link rel="icon" type="image/x-icon" href="/favicon.ico" /> <style> :global(:root) { font-family: system-ui; padding: 2em 0; } :global(.counter) { display: grid; grid-template-columns: repeat(3, minmax(0, 1fr)); place-items: center; font-size: 2em; margin-top: 2em; } :global(.children) { display: grid; place-items: center; margin-bottom: 2em; } </style> </head> <body> <main> <Counter {...someProps} client:visible> <h1>Hello React!</h1> </Counter> </main> </body> </html>`, }, { name: "script in <head>", source: `--- import Widget from '../components/Widget.astro'; import Widget2 from '../components/Widget2.astro'; --- <html lang="en"> <head> <script type="module" src="/regular_script.js"></script> </head>`, }, { name: "script hoist with frontmatter", source: `--- --- <script type="module" hoist>console.log("Hello");</script>`, }, { name: "script hoist without frontmatter", source: ` <main> <script type="module" hoist>console.log("Hello");</script> `, }, { name: "scriptinline", source: `<main><script is:inline type="module">console.log("Hello");</script>`, }, { name: "script define:vars I", source: `<script define:vars={{ value: 0 }}>console.log(value);</script>`, }, { name: "script define:vars II", source: `<script define:vars={{ "dash-case": true }}>console.log(dashCase);</script>`, }, { name: "script before elements", source: `<script>Here</script><div></div>`, }, { name: "script", source: `<main><script>console.log("Hello");</script>`, filename: "/src/pages/index.astro", }, { name: "script multiple", source: 
`<main><script>console.log("Hello");</script><script>console.log("World");</script>`, filename: "/src/pages/index.astro", }, { name: "script external", source: `<main><script src="./hello.js"></script>`, filename: "/src/pages/index.astro", }, { name: "script external in expression", source: `<main>{<script src="./hello.js"></script>}`, filename: "/src/pages/index.astro", }, { name: "script in expression", source: `<main>{true && <script>console.log("hello")</script>}`, filename: "/src/pages/index.astro", }, { name: "script inline", source: `<main><script is:inline type="module">console.log("Hello");</script>`, }, { name: "script mixed handled and inline", source: `<main><script>console.log("Hello");</script><script is:inline>console.log("World");</script>`, filename: "/src/pages/index.astro", }, { name: "text after title expression", source: `<title>a {expr} b</title>`, }, { name: "text after title expressions", source: `<title>a {expr} b {expr} c</title>`, }, { name: "slots (dynamic name)", source: `--- import Component from 'test'; const name = 'named'; --- <Component> <div slot={name}>Named</div> </Component>`, }, { name: "slots (named only)", source: `<Slotted> <span slot="a">A</span> <span slot="b">B</span> <span slot="c">C</span> </Slotted>`, }, { name: "condition expressions at the top-level", source: `{cond && <span></span>}{cond && <strong></strong>}`, }, { name: "condition expressions at the top-level with head content", source: `{cond && <meta charset=utf8>}{cond && <title>My title</title>}`, }, { name: "custom elements", source: `--- import 'test'; --- <my-element></my-element>`, }, { name: "gets all potential hydrated components", source: `--- import One from 'one'; import Two from 'two'; import 'custom-element'; const name = 'world'; --- <One client:load /> <Two client:load /> <my-element client:load /> `, }, { name: "Component siblings are siblings", source: `<BaseHead></BaseHead><link href="test">`, }, { name: "Self-closing components siblings are 
siblings", source: `<BaseHead /><link href="test">`, }, { name: "Self-closing script in head works", source: `<html><head><script is:inline /></head><html>`, }, { name: "Self-closing title", source: `<title />`, }, { name: "Self-closing title II", source: `<html><head><title /></head><body></body></html>`, }, { name: "Self-closing components in head can have siblings", source: `<html><head><BaseHead /><link href="test"></head><html>`, }, { name: "Self-closing formatting elements", source: `<div id="1"><div id="2"><div id="3"><i/><i/><i/></div></div></div>`, }, { name: "Self-closing formatting elements 2", source: `<body> <div id="1"><div id="2"><div id="3"><i id="a" /></div></div></div> <div id="4"><div id="5"><div id="6"><i id="b" /></div></div></div> <div id="7"><div id="8"><div id="9"><i id="c" /></div></div></div> </body>`, }, { name: "Nested HTML in expressions, wrapped in parens", source: `--- const image = './penguin.png'; const canonicalURL = new URL('http://example.com'); --- {image && (<meta property="og:image" content={new URL(image, canonicalURL)}>)}`, }, { name: "Use of interfaces within frontmatter", source: `--- interface MarkdownFrontmatter { date: number; image: string; author: string; } let allPosts = Astro.fetchContent<MarkdownFrontmatter>('./post/*.md'); --- <div>testing</div>`, }, { name: "dynamic import", source: `--- const markdownDocs = await Astro.glob('../markdown/*.md') const article2 = await import('../markdown/article2.md') --- <div /> `, }, { name: "Component names A-Z", source: `--- import AComponent from '../components/AComponent.jsx'; import ZComponent from '../components/ZComponent.jsx'; --- <body> <AComponent /> <ZComponent /> </body>`, }, { name: "Parser can handle files > 4096 chars", source: `<html><body>` + longRandomString + `<img width="1600" height="1131" class="img" src="https://images.unsplash.com/photo-1469854523086-cc02fe5d8800?w=1200&q=75" srcSet="https://images.unsplash.com/photo-1469854523086-cc02fe5d8800?w=1200&q=75 
800w,https://images.unsplash.com/photo-1469854523086-cc02fe5d8800?w=1200&q=75 1200w,https://images.unsplash.com/photo-1469854523086-cc02fe5d8800?w=1600&q=75 1600w,https://images.unsplash.com/photo-1469854523086-cc02fe5d8800?w=2400&q=75 2400w" sizes="(max-width: 800px) 800px, (max-width: 1200px) 1200px, (max-width: 1600px) 1600px, (max-width: 2400px) 2400px, 1200px" >`, }, { name: "SVG styles", source: `<svg><style>path { fill: red; }</style></svg>`, }, { name: "svg expressions", source: `--- const title = 'icon'; --- <svg>{title ?? null}</svg>`, }, { name: "advanced svg expression", source: `--- const title = 'icon'; --- <svg>{title ? <title>{title}</title> : null}</svg>`, }, { name: "Empty script", source: `<script hoist></script>`, }, { name: "Empty style", source: `<style define:vars={{ color: "Gainsboro" }}></style>`, }, { name: "No extra script tag", source: `<!-- Global Metadata --> <meta charset="utf-8"> <meta name="viewport" content="width=device-width"> <link rel="icon" type="image/svg+xml" href="/favicon.svg" /> <link rel="alternate icon" type="image/x-icon" href="/favicon.ico" /> <link rel="sitemap" href="/sitemap.xml"/> <!-- Global CSS --> <link rel="stylesheet" href="/theme.css" /> <link rel="stylesheet" href="/code.css" /> <link rel="stylesheet" href="/index.css" /> <!-- Preload Fonts --> <link rel="preconnect" href="https://fonts.googleapis.com"> <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin> <link href="https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:ital@0;1&display=swap" rel="stylesheet"> <!-- Scrollable a11y code helper --> <script type="module" src="/make-scrollable-code-focusable.js" /> <!-- This is intentionally inlined to avoid FOUC --> <script is:inline> const root = document.documentElement; const theme = localStorage.getItem('theme'); if (theme === 'dark' || (!theme) && window.matchMedia('(prefers-color-scheme: dark)').matches) { root.classList.add('theme-dark'); } else { root.classList.remove('theme-dark'); } 
</script> <!-- Global site tag (gtag.js) - Google Analytics --> <!-- <script async src="https://www.googletagmanager.com/gtag/js?id=G-TEL60V1WM9"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-TEL60V1WM9'); </script> -->`, }, { name: "All components", source: ` --- import { Container, Col, Row } from 'react-bootstrap'; --- <Container> <Row> <Col> <h1>Hi!</h1> </Col> </Row> </Container> `, }, { name: "Mixed style siblings", source: `<head> <style is:global>div { color: red }</style> <style is:scoped>div { color: green }</style> <style>div { color: blue }</style> </head> <div />`, }, { name: "spread with double quotation marks", source: `<div {...propsFn("string")}/>`, }, { name: "class with spread", source: `<div class="something" {...Astro.props} />`, }, { name: "class:list with spread", source: `<div class:list="something" {...Astro.props} />`, }, { name: "class list", source: `<div class:list={['one', 'variable']} />`, }, { name: "class and class list simple array", source: `<div class="two" class:list={['one', 'variable']} />`, }, { name: "class and class list object", source: `<div class="two three" class:list={['hello goodbye', { hello: true, world: true }]} />`, }, { name: "class and class list set", source: `<div class="two three" class:list={[ new Set([{hello: true, world: true}]) ]} />`, }, { name: "spread without style or class", source: `<div {...Astro.props} />`, }, { name: "spread with style but no explicit class", source: `<style>div { color: red; }</style><div {...Astro.props} />`, }, { name: "Fragment", source: `<body><Fragment><div>Default</div><div>Named</div></Fragment></body>`, }, { name: "Fragment shorthand", source: `<body><><div>Default</div><div>Named</div></></body>`, }, { name: "Fragment shorthand only", source: `<>Hello</>`, }, { name: "Fragment literal only", source: `<Fragment>world</Fragment>`, }, { name: "Fragment slotted", source: 
`<body><Component><><div>Default</div><div>Named</div></></Component></body>`, }, { name: "Fragment slotted with name", source: `<body><Component><Fragment slot=named><div>Default</div><div>Named</div></Fragment></Component></body>`, }, { name: "Fragment with await", source: `<body><Fragment> { await Promise.resolve("Awaited") } </Fragment></body>`, }, { name: "Fragment shorthand with await", source: `<body><> { await Promise.resolve("Awaited") } </></body>`, }, { name: "Fragment wrapping link with awaited href", source: `<head><Fragment><link rel="preload" href={(await import('../fonts/some-font.woff2')).default} as="font" crossorigin /></Fragment></head>`, }, { name: "Component with await", source: `<body><Component> { await Promise.resolve("Awaited") } </Component></body>`, }, { name: "Preserve slots inside custom-element", source: `<body><my-element><div slot=name>Name</div><div>Default</div></my-element></body>`, }, { name: "Preserve slot attribute in expression for custom element", source: `<body><my-element>{!href ? <button slot={slotName}>Button</button> : <a href={href} slot={slotName}>Link</a>}</my-element></body>`, }, { name: "Preserve slot attribute in conditional expression for custom element", source: `<body><my-element>{show && <div slot="content">Content</div>}</my-element></body>`, }, { name: "Preserve slot attribute at root level in expression", source: `{!href ? 
<button slot={slotName}>Button</button> : <a href={href} slot={slotName}>Link</a>}`, }, { name: "Preserve namespaces", source: `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><rect xlink:href="#id"></svg>`, }, { name: "Preserve namespaces in expressions", source: `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><rect xlink:href={` + BACKTICK + `#${iconId}` + BACKTICK + `}></svg>`, }, { name: "Preserve namespaces for components", source: `<Component some:thing="foobar">`, }, { name: "import.meta.env", source: fmt.Sprintf(`--- import Header from '../../components/Header.jsx' import Footer from '../../components/Footer.astro' import ProductPageContent from '../../components/ProductPageContent.jsx'; export async function getStaticPaths() { let products = await fetch(%s${import.meta.env.PUBLIC_NETLIFY_URL}/.netlify/functions/get-product-list%s) .then(res => res.json()).then((response) => { console.log('--- built product pages ---') return response.products.edges }); return products.map((p, i) => { return { params: {pid: p.node.handle}, props: {product: p}, }; }); } const { product } = Astro.props; --- <!doctype html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>Shoperoni | Buy {product.node.title}</title> <link rel="icon" type="image/svg+xml" href="/favicon.svg"> <link rel="stylesheet" href="/style/global.css"> </head> <body> <Header /> <div class="product-page"> <article> <ProductPageContent client:visible product={product.node} /> </article> </div> <Footer /> </body> </html>`, BACKTICK, BACKTICK), }, { name: "import.meta", source: `--- const components = import.meta.glob("../components/*.astro", { import: 'default' }); ---`, }, { name: "doctype", source: `<!DOCTYPE html><div/>`, }, { name: "select option expression", source: `--- const value = 'test'; --- <select><option>{value}</option></select>`, }, { name: "select 
nested option", source: `--- const value = 'test'; --- <select>{value && <option>{value}</option>}</select>`, }, { name: "select map expression", source: `<select>{[1, 2, 3].map(num => <option>{num}</option>)}</select><div>Hello world!</div>`, }, { name: "textarea", source: `--- const value = 'test'; --- <textarea>{value}</textarea>`, }, { name: "textarea inside expression", source: `{bool && <textarea>{value}</textarea>} {!bool && <input>}`, }, { name: "table simple case", source: `--- const content = "lol"; --- <html> <body> <table> <tr> <td>{content}</td> </tr> { ( <tr> <td>1</td> </tr> ) } </table>Hello </body> </html> `, }, { name: "complex table", source: `<html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width" /> <title>Astro Multi Table</title> </head> <body> <main> <section> {Array(3).fill(false).map((item, idx) => <div> <div class="row"> {'a'} <table> <thead> <tr> <>{Array(7).fill(false).map((entry, index) => <th>A</th>)}</> </tr> </thead> <tbody> <tr><td></td></tr> </tbody> </table> </div> </div>)} </section> <section> <div class="row"> <table> <thead> <tr> <th>B</th> <th>B</th> <th>B</th> </tr> </thead> <tbody> <tr><td></td></tr> </tbody> </table> </div> </section> </main> </body> </html>`, }, { name: "table with expression in 'th'", source: `--- const { title, footnotes, tables } = Astro.props; interface Table { title: string; data: any[]; showTitle: boolean; footnotes: string; } console.log(tables); --- <div> <div> <h2> {title} </h2> { tables.map((table: Table) => ( <> <div> <h3 class="text-3xl sm:text-5xl font-bold">{table.title}</h3> <table> <thead> {Object.keys(table.data[0]).map((thead) => ( <th>{thead}</th> ))} </thead> <tbody> {table.data.map((trow) => ( <tr> {Object.values(trow).map((cell, index) => ( <td> {cell} </td> ))} </tr> ))} </tbody> </table> </div> </> )) } </div> </div>`, }, { name: "table expressions (no implicit tbody)", source: `--- const items = ["Dog", "Cat", "Platipus"]; --- 
<table>{items.map(item => (<tr><td>{item}</td></tr>))}</table>`, }, { name: "table caption expression", source: `<table><caption>{title}</caption><tr><td>Hello</td></tr></table>`, }, { name: "table expression with trailing div", source: `<table><tr><td>{title}</td></tr></table><div>Div</div>`, }, { name: "tbody expressions", source: `--- const items = ["Dog", "Cat", "Platipus"]; --- <table><tr><td>Name</td></tr>{items.map(item => (<tr><td>{item}</td></tr>))}</table>`, }, { name: "tbody expressions 2", source: `--- const items = ["Dog", "Cat", "Platipus"]; --- <table><tr><td>Name</td></tr>{items.map(item => (<tr><td>{item}</td><td>{item + 's'}</td></tr>))}</table>`, }, { name: "tbody expressions 3", source: `<table><tbody>{rows.map(row => (<tr><td><strong>{row}</strong></td></tr>))}</tbody></table>`, }, { name: "td expressions", source: `<table><tr><td><h2>Row 1</h2></td><td>{title}</td></tr></table>`, }, { name: "td expressions II", source: `<table>{data.map(row => <tr>{row.map(cell => <td>{cell}</td>)}</tr>)}</table>`, }, { name: "self-closing td", source: `<table>{data.map(row => <tr>{row.map(cell => <td set:html={cell} />)}</tr>)}</table>`, }, { name: "th expressions", source: `<table><thead><tr><th>{title}</th></tr></thead></table>`, }, { name: "tr only", source: `<tr><td>col 1</td><td>col 2</td><td>{foo}</td></tr>`, }, { name: "caption only", source: `<caption>Hello world!</caption>`, }, { name: "anchor expressions", source: `<a>{expr}</a>`, }, { name: "anchor inside expression", source: `{true && <a>expr</a>}`, }, { name: "anchor content", source: `<a><div><h3></h3><ul><li>{expr}</li></ul></div></a>`, }, { name: "small expression", source: `<div><small>{a}</small>{data.map(a => <Component value={a} />)}</div>`, }, { name: "division inside expression", source: `<div>{16 / 4}</div>`, }, { name: "escaped entity", source: `<img alt="A person saying &#x22;hello&#x22;">`, }, { name: "textarea in form", source: 
`<html><Component><form><textarea></textarea></form></Component></html>`, }, { name: "select in form", source: `<form><select>{options.map((option) => (<option value={option.id}>{option.title}</option>))}</select><div><label>Title 3</label><input type="text" /></div><button type="submit">Submit</button></form>`, }, { name: "selectedcontent element in customizable select", source: `<select><button><selectedcontent></selectedcontent></button><option>Option 1</option><option>Option 2</option></select>`, }, { name: "selectedcontent self-closing element", source: `<select><button><selectedcontent /></button><option>Option 1</option><option>Option 2</option></select>`, }, { name: "Expression in form followed by other sibling forms", source: "<form><p>No expression here. So the next form will render.</p></form><form><h3>{data.formLabelA}</h3></form><form><h3>{data.formLabelB}</h3></form><form><p>No expression here, but the last form before me had an expression, so my form didn't render.</p></form><form><h3>{data.formLabelC}</h3></form><div><p>Here is some in-between content</p></div><form><h3>{data.formLabelD}</h3></form>", }, { name: "slot inside of Base", source: `<Base title="Home"><div>Hello</div></Base>`, }, { name: "user-defined `implicit` is printed", source: `<html implicit></html>`, }, { name: "css comment doesn’t produce semicolon", source: `<style>/* comment */.container { padding: 2rem; } </style><div class="container">My Text</div>`, }, { name: "sibling expressions", source: `<html><body> <table> {true ? (<tr><td>Row 1</td></tr>) : null} {true ? (<tr><td>Row 2</td></tr>) : null} {true ? 
(<tr><td>Row 3</td></tr>) : null} </table> </body>`, }, { name: "table", source: "<table><tr>{[0,1,2].map(x => (<td>{x}</td>))}</tr></table>", }, { name: "table II", source: "<table><thead><tr>{['Hey','Ho'].map((item)=> <th scope=\"col\">{item}</th>)}</tr></thead></table>", }, { name: "table III", source: "<table><tbody><tr><td>Cell</td><Cell /><Cell /><Cell /></tr></tbody></table>", }, { name: "table IV", source: "<body><div><tr><td>hello world</td></tr></div></body>", }, { name: "table slot I", source: "<table><slot /></table>", }, { name: "table slot II", source: "<table><tr><slot /></tr></table>", }, { name: "table slot III", source: "<table><td><slot /></td></table>", }, { name: "table slot IV", source: "<table><thead><slot /></thead></table>", }, { name: "table slot V", source: "<table><tbody><slot /></tbody></table>", }, { name: "XElement", source: `<XElement {...attrs}></XElement>{onLoadString ? <script data-something></script> : null }`, }, { name: "Empty expression", source: "<body>({})</body>", }, { name: "Empty expression with whitespace", source: "<body>({ })</body>", }, { name: "expression with leading whitespace", source: `<section> <ul class="font-mono text-sm flex flex-col gap-0.5"> { <li>Build: { new Date().toISOString() }</li> <li>NODE_VERSION: { process.env.NODE_VERSION }</li> } </ul> </section>`, }, { name: "Empty attribute expression", source: "<body attr={}></body>", }, { name: "is:raw", source: "<article is:raw><% awesome %></article>", }, { name: "Component is:raw", source: "<Component is:raw>{<% awesome %>}</Component>", }, { name: "set:html", source: "<article set:html={content} />", }, { name: "set:html with quoted attribute", source: `<article set:html="content" />`, }, { name: "set:html with template literal attribute without variable", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:html with template literal attribute with variable", source: `<article set:html=` + BACKTICK + `${content}` + 
BACKTICK + ` />`, }, { name: "set:text", source: "<article set:text={content} />", }, { name: "set:text with quoted attribute", source: `<article set:text="content" />`, }, { name: "set:text with template literal attribute without variable", source: `<article set:text=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:text with template literal attribute with variable", source: `<article set:text=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html on Component", source: `<Component set:html={content} />`, }, { name: "set:html on Component with quoted attribute", source: `<Component set:html="content" />`, }, { name: "set:html on Component with template literal attribute without variable", source: `<Component set:html=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:html on Component with template literal attribute with variable", source: `<Component set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:text on Component", source: "<Component set:text={content} />", }, { name: "set:text on Component with quoted attribute", source: `<Component set:text="content" />`, }, { name: "set:text on Component with template literal attribute without variable", source: `<Component set:text=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:text on Component with template literal attribute with variable", source: `<Component set:text=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html on custom-element", source: "<custom-element set:html={content} />", }, { name: "set:html on custom-element with quoted attribute", source: `<custom-element set:html="content" />`, }, { name: "set:html on custom-element with template literal attribute without variable", source: `<custom-element set:html=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:html on custom-element with template literal attribute with variable", source: `<custom-element set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, 
}, { name: "set:text on custom-element", source: "<custom-element set:text={content} />", }, { name: "set:text on custom-element with quoted attribute", source: `<custom-element set:text="content" />`, }, { name: "set:text on custom-element with template literal attribute without variable", source: `<custom-element set:text=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:text on custom-element with template literal attribute with variable", source: `<custom-element set:text=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html on self-closing tag", source: "<article set:html={content} />", }, { name: "set:html on self-closing tag with quoted attribute", source: `<article set:html="content" />`, }, { name: "set:html on self-closing tag with template literal attribute without variable", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + ` />`, }, { name: "set:html on self-closing tag with template literal attribute with variable", source: `<article set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html with other attributes", source: "<article set:html={content} cool=\"true\" />", }, { name: "set:html with quoted attribute and other attributes", source: `<article set:html="content" cool="true" />`, }, { name: "set:html with template literal attribute without variable and other attributes", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + ` cool="true" />`, }, { name: "set:html with template literal attribute with variable and other attributes", source: `<article set:html=` + BACKTICK + `${content}` + BACKTICK + ` cool="true" />`, }, { name: "set:html on empty tag", source: "<article set:html={content}></article>", }, { name: "set:html on empty tag with quoted attribute", source: `<article set:html="content"></article>`, }, { name: "set:html on empty tag with template literal attribute without variable", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + `></article>`, }, { 
name: "set:html on empty tag with template literal attribute with variable", source: `<article set:html=` + BACKTICK + `${content}` + BACKTICK + `></article>`, }, { // If both "set:*" directives are passed, we only respect the first one name: "set:html and set:text", source: "<article set:html={content} set:text={content} />", }, // { name: "set:html on tag with children", source: "<article set:html={content}>!!!</article>", }, { name: "set:html on tag with children and quoted attribute", source: `<article set:html="content">!!!</article>`, }, { name: "set:html on tag with children and template literal attribute without variable", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + `>!!!</article>`, }, { name: "set:html on tag with children and template literal attribute with variable", source: `<article set:html=` + BACKTICK + `${content}` + BACKTICK + `>!!!</article>`, }, { name: "set:html on tag with empty whitespace", source: "<article set:html={content}> </article>", }, { name: "set:html on tag with empty whitespace and quoted attribute", source: `<article set:html="content"> </article>`, }, { name: "set:html on tag with empty whitespace and template literal attribute without variable", source: `<article set:html=` + BACKTICK + `content` + BACKTICK + `> </article>`, }, { name: "set:html on tag with empty whitespace and template literal attribute with variable", source: `<article set:html=` + BACKTICK + `${content}` + BACKTICK + `> </article>`, }, { name: "set:html on script", source: "<script set:html={content} />", }, { name: "set:html on script with quoted attribute", source: `<script set:html="alert(1)" />`, }, { name: "set:html on script with template literal attribute without variable", source: `<script set:html=` + BACKTICK + `alert(1)` + BACKTICK + ` />`, }, { name: "set:html on script with template literal attribute with variable", source: `<script set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html on style", 
source: "<style set:html={content} />", }, { name: "set:html on style with quoted attribute", source: `<style set:html="h1{color:green;}" />`, }, { name: "set:html on style with template literal attribute without variable", source: `<style set:html=` + BACKTICK + `h1{color:green;}` + BACKTICK + ` />`, }, { name: "set:html on style with template literal attribute with variable", source: `<style set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "set:html on Fragment", source: "<Fragment set:html={\"<p>&#x3C;i>This should NOT be italic&#x3C;/i></p>\"} />", }, { name: "set:html on Fragment with quoted attribute", source: "<Fragment set:html=\"<p>&#x3C;i>This should NOT be italic&#x3C;/i></p>\" />", }, { name: "set:html on Fragment with template literal attribute without variable", source: "<Fragment set:html=`<p>&#x3C;i>This should NOT be italic&#x3C;/i></p>` />", }, { name: "set:html on Fragment with template literal attribute with variable", source: `<Fragment set:html=` + BACKTICK + `${content}` + BACKTICK + ` />`, }, { name: "template literal attribute on component", source: `<Component class=` + BACKTICK + `red` + BACKTICK + ` />`, }, { name: "template literal attribute with variable on component", source: `<Component class=` + BACKTICK + `${color}` + BACKTICK + ` />`, }, { name: "define:vars on style", source: "<style>h1{color:green;}</style><style define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1>testing</h1>", }, { name: "define:vars on style tag with style shorthand attribute on element", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1 {style}>testing</h1>", }, { name: "define:vars on style tag with style expression attribute on element", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1 style={myStyles}>testing</h1>", }, { name: "define:vars on style tag with style empty attribute on element", source: "<style 
define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1 style>testing</h1>", }, { name: "define:vars on style tag with style quoted attribute on element", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1 style='color: yellow;'>testing</h1>", }, { name: "define:vars on style tag with style template literal attribute on element", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style><h1 style=`color: ${color};`>testing</h1>", }, { name: "multiple define:vars on style", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style><style define:vars={{color:'red'}}>h2{color:var(--color)}</style><h1>foo</h1><h2>bar</h2>", }, { name: "define:vars on non-root elements", source: "<style define:vars={{color:'green'}}>h1{color:var(--color)}</style>{true ? <h1>foo</h1> : <h1>bar</h1>}", }, { name: "define:vars on script with StaticExpression turned on", // 1. An inline script with is:inline - right // 2. A hoisted script - wrong, shown up in scripts.add // 3. A define:vars hoisted script // 4. A define:vars inline script source: `<script is:inline>var one = 'one';</script><script>var two = 'two';</script><script define:vars={{foo:'bar'}}>var three = foo;</script><script is:inline define:vars={{foo:'bar'}}>var four = foo;</script>`, }, { name: "define:vars on a module script with imports", // Should not wrap with { } scope. source: `<script type="module" define:vars={{foo:'bar'}}>import 'foo';\nvar three = foo;</script>`, }, { name: "comments removed from attribute list", source: `<div><h1 {/* comment 1 */} value="1" {/* comment 2 */}>Hello</h1><Component {/* comment 1 */} value="1" {/* comment 2 */} /></div>`, }, { name: "includes comments for shorthand attribute", source: `<div><h1 {/* comment 1 */ id /* comment 2 */}>Hello</h1><Component {/* comment 1 */ id /* comment 2 */}/></div>`, }, { name: "includes comments for expression attribute", source: `<div><h1 attr={/* comment 1 */ isTrue ? 
1 : 2 /* comment 2 */}>Hello</h1><Component attr={/* comment 1 */ isTrue ? 1 : 2 /* comment 2 */}/></div>`, }, { name: "comment only expressions are removed I", source: `{/* a comment 1 */}<h1>{/* a comment 2*/}Hello</h1>`, }, { name: "comment only expressions are removed II", source: `{ list.map((i) => ( <Component> { // hello } </Component> )) }`, }, { name: "comment only expressions are removed III", source: `{ list.map((i) => ( <Component> { /* hello */ } </Component> )) }`, }, { name: "component with only a script", source: "<script>console.log('hello world');</script>", }, { name: "passes filename into createComponent if passed into the compiler options", source: `<div>test</div>`, filename: "/projects/app/src/pages/page.astro", }, { name: "passes escaped filename into createComponent if it contains single quotes", source: `<div>test</div>`, filename: "/projects/app/src/pages/page-with-'-quotes.astro", }, { name: "maybeRenderHead not printed for hoisted scripts", source: `<script></script><Layout></Layout>`, filename: "/projects/app/src/pages/page.astro", }, { name: "complex recursive component", source: `{(<Fragment><Fragment set:html={` + BACKTICK + `<${Node.tag} ${stringifyAttributes(Node.attributes)}>` + BACKTICK + `} />{Node.children.map((child) => (<Astro.self node={child} />))}<Fragment set:html={` + BACKTICK + `</${Node.tag}>` + BACKTICK + `} /></Fragment>)}`, filename: "/projects/app/src/components/RenderNode.astro", }, { name: "multibyte character + style", source: `<style>a { font-size: 16px; }</style><a class="test">ツ</a>`, }, { name: "multibyte characters", source: `--- --- <h1>こんにちは</h1>`, }, { name: "multibyte character + script", source: `<script>console.log('foo')</script><a class="test">ツ</a>`, }, { name: "transition:name with an expression", source: `<div transition:name={one + '-' + 'two'}></div>`, filename: "/projects/app/src/pages/page.astro", transitions: true, }, { name: "transition:name with an template literal", source: "<div 
transition:name=`${one}-two`></div>", filename: "/projects/app/src/pages/page.astro", transitions: true, }, { name: "transition:animate with an expression", source: "<div transition:animate={slide({duration:15})}></div>", filename: "/projects/app/src/pages/page.astro", transitions: true, }, { name: "transition:animate on Component", source: `<Component class="bar" transition:animate="morph"></Component>`, filename: "/projects/app/src/pages/page.astro", transitions: true, }, { name: "transition:persist converted to a data attribute", source: `<div transition:persist></div>`, transitions: true, }, { name: "transition:persist uses transition:name if defined", source: `<div transition:persist transition:name="foo"></div>`, transitions: true, }, { name: "transition:persist-props converted to a data attribute", source: `<my-island transition:persist transition:persist-props="false"></my-island>`, transitions: true, }, { name: "trailing expression", source: `<Component />{}`, }, { name: "nested head content stays in the head", source: `--- const meta = { title: 'My App' }; --- <html> <head> <meta charset="utf-8" /> { meta && <title>{meta.title}</title> } <meta name="after"> </head> <body> <h1>My App</h1> </body> </html>`, }, { name: "namespace is preserved when inside an expression", source: `<svg>{<image />}</svg>`, }, { name: "head content with component first", source: `--- import Analytics from '../components/Analytics.astro'; --- <Analytics /> <title>{title}</title> <meta name="description" content="a description" />`, }, { name: "jsx comment between doctype and html", source: `<!doctype html> {/* Comment */} <html lang="en"> <head> <meta charset="UTF-8" /> </head> </html>`, }, { name: "multiline class attribute on component", source: "<Component class=\"some-class\n another-class\n third-class\">content</Component>", }, } for _, tt := range tests { if tt.only { tests = make([]testcase, 0) tests = append(tests, tt) break } } for _, tt := range tests { t.Run(tt.name, 
func(t *testing.T) { // transform output from source code := test_utils.Dedent(tt.source) doc, err := astro.Parse(strings.NewReader(code)) h := handler.NewHandler(code, "<stdin>") if err != nil { t.Error(err) } hash := astro.HashString(code) transformOptions := transform.TransformOptions{ Scope: hash, } transform.ExtractStyles(doc, &transformOptions) transform.Transform(doc, transformOptions, h) // note: we want to test Transform in context here, but more advanced cases could be tested separately result := PrintToJS(code, doc, 0, transform.TransformOptions{ Scope: "XXXX", InternalURL: "http://localhost:3000/", Filename: tt.filename, AstroGlobalArgs: "'https://astro.build'", TransitionsAnimationURL: "transitions.css", }, h) output := string(result.Output) test_utils.MakeSnapshot( &test_utils.SnapshotOptions{ Testing: t, TestCaseName: tt.name, Input: code, Output: output, Kind: test_utils.JsOutput, FolderName: "__printer_js__", }) }) } } func TestPrintToJSON(t *testing.T) { tests := []jsonTestcase{ { name: "basic", source: `<h1>Hello world!</h1>`, }, { name: "expression", source: `<h1>Hello {world}</h1>`, }, { name: "Component", source: `<Component />`, }, { name: "custom-element", source: `<custom-element />`, }, { name: "Doctype", source: `<!DOCTYPE html />`, }, { name: "Comment", source: `<!--hello-->`, }, { name: "Comment preserves whitespace", source: `<!-- hello -->`, }, { name: "Fragment Shorthand", source: `<>Hello</>`, }, { name: "Fragment Literal", source: `<Fragment>World</Fragment>`, }, { name: "Frontmatter", source: `--- const a = "hey" --- <div>{a}</div>`, }, { name: "JSON escape", source: `--- const a = "\n" const b = "\"" const c = '\'' --- {a + b + c}`, }, { name: "Preserve namespaces", source: `<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><rect xlink:href="#id"></svg>`, }, { name: "style before html", source: `<style></style><html><body><h1>Hello world!</h1></body></html>`, }, { name: "style after html", source: 
`<html><body><h1>Hello world!</h1></body></html><style></style>`, }, { name: "style after empty html", source: `<html></html><style></style>`, }, { name: "style after html with component in head", source: `<html lang="en"><head><BaseHead /></head></html><style>@use "../styles/global.scss";</style>`, }, { name: "style after html with component in head and body", source: `<html lang="en"><head><BaseHead /></head><body><Header /></body></html><style>@use "../styles/global.scss";</style>`, }, { name: "style after body with component in head and body", source: `<html lang="en"><head><BaseHead /></head><body><Header /></body><style>@use "../styles/global.scss";</style></html>`, }, { name: "style in html", source: `<html><body><h1>Hello world!</h1></body><style></style></html>`, }, { name: "style in body", source: `<html><body><h1>Hello world!</h1><style></style></body></html>`, }, { name: "element with unterminated double quote attribute", source: `<main id="gotcha />`, }, { name: "element with unterminated single quote attribute", source: `<main id='gotcha />`, }, { name: "element with unterminated template literal attribute", source: `<main id=` + BACKTICK + `gotcha />`, }, { name: "jsx comment between doctype and html", source: `<!doctype html> {/* Comment */} <html lang="en"> <head> <meta charset="UTF-8" /> </head> </html>`, }, } for _, tt := range tests { if tt.only { tests = make([]jsonTestcase, 0) tests = append(tests, tt) break } } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // transform output from source code := test_utils.Dedent(tt.source) doc, err := astro.ParseWithOptions(strings.NewReader(code), astro.ParseOptionEnableLiteral(true), astro.ParseOptionWithHandler(&handler.Handler{})) if err != nil { t.Error(err) } result := PrintToJSON(code, doc, types.ParseOptions{Position: false}) test_utils.MakeSnapshot( &test_utils.SnapshotOptions{ Testing: t, TestCaseName: tt.name, Input: code, Output: string(result.Output), Kind: 
test_utils.JsonOutput, FolderName: "__printer_json__", }) }) } } ================================================ FILE: internal/printer/utils.go ================================================ package printer import ( "fmt" "regexp" "strings" "github.com/iancoleman/strcase" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/js_scanner" "github.com/withastro/compiler/internal/transform" ) func escapeText(src string) string { return escapeBackticks( escapeInterpolation( escapeExistingEscapes(src), ), ) } func escapeBraces(src string) string { return escapeStarSlash(escapeTSXExpressions( escapeExistingEscapes(src), )) } func escapeStarSlash(src string) string { return strings.ReplaceAll(src, "*/", "*\\/") } func getTSXComponentName(filename string) string { if filename == "<stdin>" { return "__AstroComponent_" } if len(filename) == 0 { return "__AstroComponent_" } parts := strings.Split(filename, "/") part := parts[len(parts)-1] if len(part) == 0 { return "__AstroComponent_" } basename := strcase.ToCamel(strings.Split(part, ".")[0]) if js_scanner.IsIdentifier([]byte(basename)) { return fmt.Sprintf("%s%s", basename, "__AstroComponent_") } else { return "__AstroComponent_" } } func getComponentName(filename string) string { if len(filename) == 0 { return "$$Component" } parts := strings.Split(filename, "/") part := parts[len(parts)-1] if len(part) == 0 { return "$$Component" } basename := strcase.ToCamel(strings.Split(part, ".")[0]) if basename == "Astro" { return "$$Component" } return strings.Join([]string{"$$", basename}, "") } func escapeExistingEscapes(src string) string { return strings.Replace(src, "\\", "\\\\", -1) } func escapeTSXExpressions(src string) string { open := regexp.MustCompile(`{`) close := regexp.MustCompile(`}`) return close.ReplaceAllString(open.ReplaceAllString(src, `\\{`), `\\}`) } func escapeInterpolation(src string) string { interpolation := regexp.MustCompile(`\${`) return 
interpolation.ReplaceAllString(src, "\\${") } // Escape backtick characters for Text nodes func escapeBackticks(src string) string { backticks := regexp.MustCompile("`") return backticks.ReplaceAllString(src, "\\`") } func escapeSingleQuote(str string) string { return strings.Replace(str, "'", "\\'", -1) } func escapeDoubleQuote(str string) string { return strings.Replace(str, `"`, "\\\"", -1) } func escapeNewlines(str string) string { str = strings.Replace(str, "\n", `\n`, -1) str = strings.Replace(str, "\r", `\r`, -1) return str } func encodeDoubleQuote(str string) string { return strings.Replace(str, `"`, "&quot;", -1) } func convertAttributeValue(n *astro.Node, attrName string) string { expr := `""` if transform.HasAttr(n, attrName) { attr := transform.GetAttr(n, attrName) switch attr.Type { case astro.QuotedAttribute: expr = fmt.Sprintf(`"%s"`, attr.Val) case astro.ExpressionAttribute: expr = fmt.Sprintf(`(%s)`, attr.Val) case astro.TemplateLiteralAttribute: expr = fmt.Sprintf("`%s`", attr.Val) } } return expr } ================================================ FILE: internal/sourcemap/sourcemap.go ================================================ package sourcemap import ( "bytes" "unicode/utf8" "github.com/withastro/compiler/internal/helpers" "github.com/withastro/compiler/internal/loc" ) type Mapping struct { GeneratedLine int // 0-based GeneratedColumn int // 0-based count of UTF-16 code units SourceIndex int // 0-based OriginalLine int // 0-based OriginalColumn int // 0-based count of UTF-16 code units } type SourceMap struct { Sources []string SourcesContent []SourceContent Mappings []Mapping } type SourceContent struct { // This stores both the unquoted and the quoted values. We try to use the // already-quoted value if possible so we don't need to re-quote it // unnecessarily for maximum performance. Quoted string // But sometimes we need to re-quote the value, such as when it contains // non-ASCII characters and we are in ASCII-only mode. 
	// In that case we quote
	// this parsed UTF-16 value.
	Value []uint16
}

// Find returns the last mapping on the given generated line whose generated
// column is at or before the given column, or nil when no such mapping exists.
func (sm *SourceMap) Find(line int, column int) *Mapping {
	mappings := sm.Mappings

	// Binary search
	count := len(mappings)
	index := 0
	for count > 0 {
		step := count / 2
		i := index + step
		mapping := mappings[i]
		if mapping.GeneratedLine < line || (mapping.GeneratedLine == line && mapping.GeneratedColumn <= column) {
			index = i + 1
			count -= step + 1
		} else {
			count = step
		}
	}

	// Handle search failure
	if index > 0 {
		mapping := &mappings[index-1]

		// Match the behavior of the popular "source-map" library from Mozilla
		if mapping.GeneratedLine == line {
			return mapping
		}
	}
	return nil
}

// base64 is the alphabet used by Base64 VLQ encoding in the source map spec.
var base64 = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")

// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the continuation
// bit. The continuation bit tells us whether there are more digits in this
// value following this digit.
//
//   Continuation
//   |    Sign
//   |    |
//   V    V
//   101011
func EncodeVLQ(value int) []byte {
	var vlq int
	if value < 0 {
		// Negative values are stored as (magnitude << 1) | 1
		vlq = ((-value) << 1) | 1
	} else {
		vlq = value << 1
	}

	// Handle the common case up front without allocations
	if (vlq >> 5) == 0 {
		digit := vlq & 31
		return base64[digit : digit+1]
	}

	encoded := []byte{}
	for {
		digit := vlq & 31
		vlq >>= 5

		// If there are still more digits in this value, we must make sure the
		// continuation bit is marked
		if vlq != 0 {
			digit |= 32
		}
		encoded = append(encoded, base64[digit])
		if vlq == 0 {
			break
		}
	}
	return encoded
}

// DecodeVLQ decodes one Base64 VLQ value from encoded beginning at start and
// returns the value together with the index just past the consumed digits.
// A byte outside the base64 alphabet terminates decoding early.
func DecodeVLQ(encoded []byte, start int) (int, int) {
	shift := 0
	vlq := 0

	// Scan over the input
	for {
		index := bytes.IndexByte(base64, encoded[start])
		if index < 0 {
			break
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		start++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	value := vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, start
}

// DecodeVLQUTF16 decodes one Base64 VLQ value from a UTF-16 code unit slice.
// It returns the value, the number of code units consumed, and ok=false when
// the input is empty, truncated, or contains a non-alphabet code unit.
func DecodeVLQUTF16(encoded []uint16) (int, int, bool) {
	n := len(encoded)
	if n == 0 {
		return 0, 0, false
	}

	// Scan over the input
	current := 0
	shift := 0
	vlq := 0
	for {
		if current >= n {
			return 0, 0, false
		}
		index := bytes.IndexByte(base64, byte(encoded[current]))
		if index < 0 {
			return 0, 0, false
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		current++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	var value = vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, current, true
}

// LineColumnOffset is a position expressed as 0-based lines plus UTF-16
// columns within the last line.
type LineColumnOffset struct {
	Lines   int
	Columns int
}

// ComesBefore reports whether a is strictly before b.
func (a LineColumnOffset) ComesBefore(b LineColumnOffset) bool {
	return a.Lines < b.Lines || (a.Lines == b.Lines && a.Columns < b.Columns)
}

// Add advances a by the relative offset b: columns accumulate on the same
// line, while any line advance in b resets the column to b's column.
func (a *LineColumnOffset) Add(b LineColumnOffset) {
	if b.Lines == 0 {
		a.Columns += b.Columns
	} else {
		a.Lines += b.Lines
		a.Columns = b.Columns
	}
}

// AdvanceBytes advances the offset over the given UTF-8 bytes, counting
// columns in UTF-16 code units as the "source-map" library does.
func (offset *LineColumnOffset) AdvanceBytes(bytes []byte) {
	columns := offset.Columns
	for len(bytes) > 0 {
		c, width :=
		utf8.DecodeRune(bytes)
		bytes = bytes[width:]
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && len(bytes) > 0 && bytes[0] == '\n' {
				columns++
				continue
			}
			offset.Lines++
			columns = 0
		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}

// AdvanceString advances the offset over the given string, counting columns
// in UTF-16 code units as the "source-map" library does.
func (offset *LineColumnOffset) AdvanceString(text string) {
	columns := offset.Columns
	for i, c := range text {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(text) && text[i+1] == '\n' {
				columns++
				continue
			}
			offset.Lines++
			columns = 0
		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}

// SourceMapPieces holds a serialized source map split into its JSON prefix,
// the raw VLQ "mappings" string, and the JSON suffix.
type SourceMapPieces struct {
	Prefix   []byte
	Mappings []byte
	Suffix   []byte
}

// HasContent reports whether any of the three pieces is non-empty.
func (pieces SourceMapPieces) HasContent() bool {
	return len(pieces.Prefix)+len(pieces.Mappings)+len(pieces.Suffix) > 0
}

// SourceMapShift records that generated positions at/after Before should be
// treated as if they were at After (a column-only shift).
type SourceMapShift struct {
	Before LineColumnOffset
	After  LineColumnOffset
}

// Finalize assembles the final source map bytes, applying the given column
// shifts to the VLQ mappings. shifts must contain at least one entry; a
// single entry means "no shift" and takes a fast concatenation path.
func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte {
	// An optimized path for when there are no shifts
	if len(shifts) == 1 {
		bytes := pieces.Prefix
		minCap := len(bytes) + len(pieces.Mappings) + len(pieces.Suffix)
		if cap(bytes) < minCap {
			bytes = append(make([]byte, 0, minCap), bytes...)
		}
		bytes = append(bytes, pieces.Mappings...)
		bytes = append(bytes, pieces.Suffix...)
		return bytes
	}

	startOfRun := 0
	current := 0
	generated := LineColumnOffset{}
	prevShiftColumnDelta := 0
	j := helpers.Joiner{}

	// Start the source map
	j.AddBytes(pieces.Prefix)

	// This assumes that a) all mappings are valid and b) all mappings are ordered
	// by increasing generated position. This should be the case for all mappings
	// generated by esbuild, which should be the only mappings we process here.
	for current < len(pieces.Mappings) {
		// Handle a line break
		if pieces.Mappings[current] == ';' {
			generated.Lines++
			generated.Columns = 0
			prevShiftColumnDelta = 0
			current++
			continue
		}

		potentialEndOfRun := current

		// Read the generated column
		generatedColumnDelta, next := DecodeVLQ(pieces.Mappings, current)
		generated.Columns += generatedColumnDelta
		current = next

		potentialStartOfRun := current

		// Skip over the original position information
		_, current = DecodeVLQ(pieces.Mappings, current) // The original source
		_, current = DecodeVLQ(pieces.Mappings, current) // The original line
		_, current = DecodeVLQ(pieces.Mappings, current) // The original column

		// Skip a trailing comma
		if current < len(pieces.Mappings) && pieces.Mappings[current] == ',' {
			current++
		}

		// Detect crossing shift boundaries
		didCrossBoundary := false
		for len(shifts) > 1 && shifts[1].Before.ComesBefore(generated) {
			shifts = shifts[1:]
			didCrossBoundary = true
		}
		if !didCrossBoundary {
			continue
		}

		// This shift isn't relevant if the next mapping after this shift is on a
		// following line. In that case, don't split and keep scanning instead.
		shift := shifts[0]
		if shift.After.Lines != generated.Lines {
			continue
		}

		// Add all previous mappings in a single run for efficiency. Since source
		// mappings are relative, no data needs to be modified inside this run.
		j.AddBytes(pieces.Mappings[startOfRun:potentialEndOfRun])

		// Then modify the first mapping across the shift boundary with the updated
		// generated column value. It's simplest to only support column shifts. This
		// is reasonable because import paths should not contain newlines.
		if shift.Before.Lines != shift.After.Lines {
			panic("Unexpected line change when shifting source maps")
		}
		shiftColumnDelta := shift.After.Columns - shift.Before.Columns
		j.AddBytes(EncodeVLQ(generatedColumnDelta + shiftColumnDelta - prevShiftColumnDelta))
		prevShiftColumnDelta = shiftColumnDelta

		// Finally, start the next run after the end of this generated column offset
		startOfRun = potentialStartOfRun
	}

	// Finish the source map
	j.AddBytes(pieces.Mappings[startOfRun:])
	j.AddBytes(pieces.Suffix)
	return j.Done()
}

// Coordinates in source maps are stored using relative offsets for size
// reasons. When joining together chunks of a source map that were emitted
// in parallel for different parts of a file, we need to fix up the first
// segment of each chunk to be relative to the end of the previous chunk.
type SourceMapState struct {
	// This isn't stored in the source map. It's only used by the bundler to join
	// source map chunks together correctly.
	GeneratedLine int

	// These are stored in the source map in VLQ format.
	GeneratedColumn int
	SourceIndex     int
	OriginalLine    int
	OriginalColumn  int
}

// Source map chunks are computed in parallel for speed. Each chunk is relative
// to the zero state instead of being relative to the end state of the previous
// chunk, since it's impossible to know the end state of the previous chunk in
// a parallel computation.
//
// After all chunks are computed, they are joined together in a second pass.
// This rewrites the first mapping in each chunk to be relative to the end
// state of the previous chunk.
func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
	// Handle line breaks in between this mapping and the previous one
	if startState.GeneratedLine != 0 {
		j.AddBytes(bytes.Repeat([]byte{';'}, startState.GeneratedLine))
		prevEndState.GeneratedColumn = 0
	}

	// Skip past any leading semicolons, which indicate line breaks
	semicolons := 0
	for sourceMap[semicolons] == ';' {
		semicolons++
	}
	if semicolons > 0 {
		j.AddBytes(sourceMap[:semicolons])
		sourceMap = sourceMap[semicolons:]
		prevEndState.GeneratedColumn = 0
		startState.GeneratedColumn = 0
	}

	// Strip off the first mapping from the buffer. The first mapping should be
	// for the start of the original file (the printer always generates one for
	// the start of the file).
	generatedColumn, i := DecodeVLQ(sourceMap, 0)
	sourceIndex, i := DecodeVLQ(sourceMap, i)
	originalLine, i := DecodeVLQ(sourceMap, i)
	originalColumn, i := DecodeVLQ(sourceMap, i)
	sourceMap = sourceMap[i:]

	// Rewrite the first mapping to be relative to the end state of the previous
	// chunk. We now know what the end state is because we're in the second pass
	// where all chunks have already been generated.
	startState.SourceIndex += sourceIndex
	startState.GeneratedColumn += generatedColumn
	startState.OriginalLine += originalLine
	startState.OriginalColumn += originalColumn
	j.AddBytes(appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState))

	// Then append everything after that without modification.
	j.AddBytes(sourceMap)
}

// appendMappingToBuffer serializes one mapping segment (relative to prevState)
// onto buffer in VLQ form, inserting a separating comma when needed. lastByte
// is the byte currently preceding the segment (0 when the buffer is empty).
func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) []byte {
	// Put commas in between mappings
	if lastByte != 0 && lastByte != ';' && lastByte != '"' {
		buffer = append(buffer, ',')
	}

	// Record the generated column (the line is recorded using ';' elsewhere)
	buffer = append(buffer, EncodeVLQ(currentState.GeneratedColumn-prevState.GeneratedColumn)...)
	prevState.GeneratedColumn = currentState.GeneratedColumn

	// Record the generated source
	buffer = append(buffer, EncodeVLQ(currentState.SourceIndex-prevState.SourceIndex)...)
	prevState.SourceIndex = currentState.SourceIndex

	// Record the original line
	buffer = append(buffer, EncodeVLQ(currentState.OriginalLine-prevState.OriginalLine)...)
	prevState.OriginalLine = currentState.OriginalLine

	// Record the original column
	buffer = append(buffer, EncodeVLQ(currentState.OriginalColumn-prevState.OriginalColumn)...)
	prevState.OriginalColumn = currentState.OriginalColumn

	return buffer
}

// LineOffsetTable accelerates conversion of byte offsets into line/column
// positions for one line of the input.
type LineOffsetTable struct {
	byteOffsetToStartOfLine int

	// The source map specification is very loose and does not specify what
	// column numbers actually mean. The popular "source-map" library from Mozilla
	// appears to interpret them as counts of UTF-16 code units, so we generate
	// those too for compatibility.
	//
	// We keep mapping tables around to accelerate conversion from byte offsets
	// to UTF-16 code unit counts. However, this mapping takes up a lot of memory
	// and generates a lot of garbage. Since most JavaScript is ASCII and the
	// mapping for ASCII is 1:1, we avoid creating a table for ASCII-only lines
	// as an optimization.
	byteOffsetToFirstNonASCII int
	columnsForNonASCII        []int
	utf16LineLength           int
}

// GenerateLineOffsetTables precomputes, for every line in contents, the byte
// offset of the line start plus the byte-to-UTF-16 column translation table
// (only materialized once a non-ASCII character is seen on the line).
func GenerateLineOffsetTables(contents string, approximateLineCount int) []LineOffsetTable {
	var ColumnsForNonASCII []int
	ByteOffsetToFirstNonASCII := int(0)
	lineByteOffset := 0
	columnByteOffset := 0
	column := int(0)

	// Preallocate the top-level table using the approximate line count from the lexer
	lineOffsetTables := make([]LineOffsetTable, 0, approximateLineCount)

	for i, c := range contents {
		// Mark the start of the next line
		if column == 0 {
			lineByteOffset = i
		}

		// Start the mapping if this character is non-ASCII
		if c > 0x7F && ColumnsForNonASCII == nil {
			columnByteOffset = i - lineByteOffset
			ByteOffsetToFirstNonASCII = int(columnByteOffset)
			ColumnsForNonASCII = []int{}
		}

		// Update the per-byte column offsets
		if ColumnsForNonASCII != nil {
			for lineBytesSoFar := i - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
				ColumnsForNonASCII = append(ColumnsForNonASCII, column)
			}
		}

		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(contents) && contents[i+1] == '\n' {
				column++
				continue
			}
			if c <= 0xFFFF {
				column++
			} else {
				column += 2
			}
			// End of line: flush the finished line's table and reset per-line state
			lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
				byteOffsetToStartOfLine:   int(lineByteOffset),
				byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
				columnsForNonASCII:        ColumnsForNonASCII,
				utf16LineLength:           column,
			})
			columnByteOffset = 0
			ByteOffsetToFirstNonASCII = 0
			ColumnsForNonASCII = nil
			column = 0
		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				column++
			} else {
				column += 2
			}
		}
	}

	// Mark the start of the next line
	if column == 0 {
		lineByteOffset = len(contents)
	}

	// Do one last update for the column at the end of the file
	if ColumnsForNonASCII != nil {
		for lineBytesSoFar := len(contents) - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
			ColumnsForNonASCII = append(ColumnsForNonASCII, column)
		}
	}

	// Flush the final (unterminated) line
	lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
		byteOffsetToStartOfLine:   int(lineByteOffset),
		byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
		columnsForNonASCII:        ColumnsForNonASCII,
		utf16LineLength:           column,
	})
	return lineOffsetTables
}

// Chunk is one independently generated piece of a source map.
type Chunk struct {
	Buffer []byte

	// This end state will be used to rewrite the start of the following source
	// map chunk so that the delta-encoded VLQ numbers are preserved.
	EndState SourceMapState

	// There probably isn't a source mapping at the end of the file (nor should
	// there be) but if we're appending another source map chunk after this one,
	// we'll need to know how many characters were in the last line we generated.
	FinalGeneratedColumn int

	ShouldIgnore bool
}

// ChunkBuilder incrementally builds the VLQ mappings for one source map chunk.
type ChunkBuilder struct {
	inputSourceMap      *SourceMap
	sourceMap           []byte
	prevLoc             loc.Loc
	prevState           SourceMapState
	lastGeneratedUpdate int
	generatedColumn     int
	hasPrevState        bool
	lineOffsetTables    []LineOffsetTable

	// This is a workaround for a bug in the popular "source-map" library:
	// https://github.com/mozilla/source-map/issues/261. The library will
	// sometimes return null when querying a source map unless every line
	// starts with a mapping at column zero.
	//
	// The workaround is to replicate the previous mapping if a line ends
	// up not starting with a mapping. This is done lazily because we want
	// to avoid replicating the previous mapping if we don't need to.
	lineStartsWithMapping     bool
	coverLinesWithoutMappings bool
}

// MakeChunkBuilder constructs a ChunkBuilder for the given (possibly nil)
// input source map and precomputed line offset tables.
func MakeChunkBuilder(inputSourceMap *SourceMap, lineOffsetTables []LineOffsetTable) ChunkBuilder {
	return ChunkBuilder{
		inputSourceMap:   inputSourceMap,
		prevLoc:          loc.Loc{Start: -1},
		lineOffsetTables: lineOffsetTables,

		// We automatically repeat the previous source mapping if we ever generate
		// a line that doesn't start with a mapping. This helps give files more
		// complete mapping coverage without gaps.
		//
		// However, we probably shouldn't do this if the input file has a nested
		// source map that we will be remapping through. We have no idea what state
		// that source map is in and it could be pretty scrambled.
		//
		// I've seen cases where blindly repeating the last mapping for subsequent
		// lines gives very strange and unhelpful results with source maps from
		// other tools.
		coverLinesWithoutMappings: inputSourceMap == nil,
	}
}

// GetLineAndColumnForLocation converts a byte offset into a 1-based
// [line, column] pair, with the column counted in UTF-16 code units.
func (b *ChunkBuilder) GetLineAndColumnForLocation(location loc.Loc) []int {
	b.prevLoc = location

	// Binary search to find the line
	lineOffsetTables := b.lineOffsetTables
	count := len(lineOffsetTables)
	originalLine := 0
	for count > 0 {
		step := count / 2
		i := originalLine + step
		if len(lineOffsetTables) > i && lineOffsetTables[i].byteOffsetToStartOfLine <= location.Start {
			originalLine = i + 1
			count = count - step - 1
		} else {
			count = step
		}
	}
	originalLine--

	// Use the line to compute the column
	line := &lineOffsetTables[originalLine]
	originalColumn := int(location.Start - line.byteOffsetToStartOfLine)
	if line.columnsForNonASCII != nil && originalColumn >= int(line.byteOffsetToFirstNonASCII) {
		// Translate the byte column through the per-line UTF-16 column table
		newColumn := originalColumn - int(line.byteOffsetToFirstNonASCII)
		if len(line.columnsForNonASCII) > newColumn {
			originalColumn = int(line.columnsForNonASCII[newColumn])
		}
	}

	// 1-based line, 1-based column
	return []int{originalLine + 1, originalColumn + 1}
}

// OffsetAt converts a byte offset into an absolute UTF-16 code unit offset
// from the start of the file.
func (b *ChunkBuilder) OffsetAt(location loc.Loc) int {
	lineAndColumn := b.GetLineAndColumnForLocation(location)
	line := lineAndColumn[0] - 1
	column := lineAndColumn[1] - 1

	// Collect the length of every line before this one
	offset := 0
	for i := 0; i < line; i++ {
		currentLine := b.lineOffsetTables[i]
		offset += currentLine.utf16LineLength
	}

	// Add the column within this line
	return offset + column
}

// AddSourceMapping records a mapping from the current generated position
// (derived from output) back to the original byte offset in location.
// A negative location.Start maps to original line 0, column 0.
func (b *ChunkBuilder) AddSourceMapping(location loc.Loc, output []byte) {
	if location == b.prevLoc {
		return
	}
	b.prevLoc = location

	if location.Start < 0 {
		b.appendMapping(SourceMapState{
			GeneratedLine:   b.prevState.GeneratedLine,
			GeneratedColumn: b.generatedColumn,
			SourceIndex:     0,
			OriginalLine:    0,
			OriginalColumn:  0,
		})
		// This line now has a mapping on it, so don't insert another one
		b.lineStartsWithMapping = true
		return
	}

	// Binary search to find the line
	lineOffsetTables := b.lineOffsetTables
	count := len(lineOffsetTables)
	originalLine := 0
	for count > 0 {
		step := count / 2
		i := originalLine + step
		if i > -1 && lineOffsetTables[i].byteOffsetToStartOfLine <= location.Start {
			originalLine = i + 1
			count = count - step - 1
		} else {
			count = step
		}
	}
	originalLine--

	// Use the line to compute the column
	line := &lineOffsetTables[originalLine]
	originalColumn := int(location.Start - line.byteOffsetToStartOfLine)
	if line.columnsForNonASCII != nil && originalColumn >= int(line.byteOffsetToFirstNonASCII) {
		if len(line.columnsForNonASCII) > originalColumn-int(line.byteOffsetToFirstNonASCII) {
			originalColumn = int(line.columnsForNonASCII[originalColumn-int(line.byteOffsetToFirstNonASCII)])
		}
	}

	b.updateGeneratedLineAndColumn(output)

	// If this line doesn't start with a mapping and we're about to add a mapping
	// that's not at the start, insert a mapping first so the line starts with one.
	if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.generatedColumn > 0 && b.hasPrevState {
		b.appendMappingWithoutRemapping(SourceMapState{
			GeneratedLine:   b.prevState.GeneratedLine,
			GeneratedColumn: 0,
			SourceIndex:     b.prevState.SourceIndex,
			OriginalLine:    b.prevState.OriginalLine,
			OriginalColumn:  b.prevState.OriginalColumn,
		})
	}

	b.appendMapping(SourceMapState{
		GeneratedLine:   b.prevState.GeneratedLine,
		GeneratedColumn: b.generatedColumn,
		OriginalLine:    originalLine,
		OriginalColumn:  originalColumn,
	})

	// This line now has a mapping on it, so don't insert another one
	b.lineStartsWithMapping = true
}

// GenerateChunk finishes the chunk for the given output. ShouldIgnore is set
// when the mappings consist only of line-break semicolons (i.e. no segments).
func (b *ChunkBuilder) GenerateChunk(output []byte) Chunk {
	b.updateGeneratedLineAndColumn(output)
	shouldIgnore := true
	for _, c := range b.sourceMap {
		if c != ';' {
			shouldIgnore = false
			break
		}
	}
	return Chunk{
		Buffer:               b.sourceMap,
		EndState:             b.prevState,
		FinalGeneratedColumn: b.generatedColumn,
		ShouldIgnore:         shouldIgnore,
	}
}

// Scan over the printed text since the last source mapping and update the
// generated line and column numbers
func (b *ChunkBuilder) updateGeneratedLineAndColumn(output []byte) {
	for i, c := range string(output[b.lastGeneratedUpdate:]) {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			// NOTE(review): unlike LineColumnOffset.AdvanceBytes, the '\r' of a
			// "\r\n" pair does not advance the generated column here — confirm
			// this asymmetry is intentional.
			if c == '\r' {
				newlineCheck := b.lastGeneratedUpdate + i + 1
				if newlineCheck < len(output) && output[newlineCheck] == '\n' {
					continue
				}
			}

			// If we're about to move to the next line and the previous line didn't have
			// any mappings, add a mapping at the start of the previous line.
			if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.hasPrevState {
				b.appendMappingWithoutRemapping(SourceMapState{
					GeneratedLine:   b.prevState.GeneratedLine,
					GeneratedColumn: 0,
					SourceIndex:     b.prevState.SourceIndex,
					OriginalLine:    b.prevState.OriginalLine,
					OriginalColumn:  b.prevState.OriginalColumn,
				})
			}

			b.prevState.GeneratedLine++
			b.prevState.GeneratedColumn = 0
			b.generatedColumn = 0
			b.sourceMap = append(b.sourceMap, ';')

			// This new line doesn't have a mapping yet
			b.lineStartsWithMapping = false

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				b.generatedColumn++
			} else {
				b.generatedColumn += 2
			}
		}
	}
	b.lastGeneratedUpdate = len(output)
}

// appendMapping appends a mapping, first remapping it through the input
// source map (if any) so positions point all the way back to the original.
func (b *ChunkBuilder) appendMapping(currentState SourceMapState) {
	// If the input file had a source map, map all the way back to the original
	if b.inputSourceMap != nil {
		mapping := b.inputSourceMap.Find(
			int(currentState.OriginalLine),
			int(currentState.OriginalColumn))

		// Some locations won't have a mapping
		if mapping == nil {
			return
		}

		currentState.SourceIndex = int(mapping.SourceIndex)
		currentState.OriginalLine = int(mapping.OriginalLine)
		currentState.OriginalColumn = int(mapping.OriginalColumn)
	}

	b.appendMappingWithoutRemapping(currentState)
}

// appendMappingWithoutRemapping serializes currentState relative to prevState
// and records it as the new previous state.
func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState) {
	var lastByte byte
	if len(b.sourceMap) != 0 {
		lastByte = b.sourceMap[len(b.sourceMap)-1]
	}
	b.sourceMap = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState)
	b.prevState = currentState
	b.hasPrevState = true
}

================================================
FILE: internal/t/t.go
================================================

package t

// ParseOptions configures parsing for the JSON printer.
type ParseOptions struct {
	Filename string
	Position bool
}

================================================
FILE: internal/test_utils/test_utils.go
================================================

package test_utils

import (
	"fmt"
	"strings"
	"testing"

	"github.com/gkampitakis/go-snaps/snaps"
"github.com/google/go-cmp/cmp" "github.com/lithammer/dedent" ) func RemoveNewlines(input string) string { return strings.ReplaceAll(input, "\n", "") } func Dedent(input string) string { return dedent.Dedent( // removes any leading whitespace strings.ReplaceAll( // compress linebreaks to 1 or 2 lines max strings.TrimLeft( strings.TrimRight(input, " \n\r"), // remove any trailing whitespace " \t\r\n"), // remove leading whitespace "\n\n\n", "\n\n"), ) } func ANSIDiff(x, y interface{}, opts ...cmp.Option) string { escapeCode := func(code int) string { return fmt.Sprintf("\x1b[%dm", code) } diff := cmp.Diff(x, y, opts...) if diff == "" { return "" } ss := strings.Split(diff, "\n") for i, s := range ss { switch { case strings.HasPrefix(s, "-"): ss[i] = escapeCode(31) + s + escapeCode(0) case strings.HasPrefix(s, "+"): ss[i] = escapeCode(32) + s + escapeCode(0) } } return strings.Join(ss, "\n") } // Removes unsupported characters from the test case name, because it will be used as name for the snapshot func RedactTestName(testCaseName string) string { snapshotName := strings.ReplaceAll(testCaseName, "#", "_") snapshotName = strings.ReplaceAll(snapshotName, "<", "_") snapshotName = strings.ReplaceAll(snapshotName, ">", "_") snapshotName = strings.ReplaceAll(snapshotName, ")", "_") snapshotName = strings.ReplaceAll(snapshotName, "(", "_") snapshotName = strings.ReplaceAll(snapshotName, ":", "_") snapshotName = strings.ReplaceAll(snapshotName, " ", "_") snapshotName = strings.ReplaceAll(snapshotName, "#", "_") snapshotName = strings.ReplaceAll(snapshotName, "'", "_") snapshotName = strings.ReplaceAll(snapshotName, "\"", "_") snapshotName = strings.ReplaceAll(snapshotName, "@", "_") snapshotName = strings.ReplaceAll(snapshotName, "`", "_") snapshotName = strings.ReplaceAll(snapshotName, "+", "_") return snapshotName } type OutputKind int const ( JsOutput = iota JsonOutput CssOutput HtmlOutput JsxOutput ) var outputKind = map[OutputKind]string{ JsOutput: "js", JsonOutput: 
"json", CssOutput: "css", HtmlOutput: "html", JsxOutput: "jsx", } type SnapshotOptions struct { // The testing instances Testing *testing.T // The name of the test case TestCaseName string // The initial source code that needs to be tested Input string // The final output Output string // The kind of **markdown block** that the output will be wrapped Kind OutputKind // The folder name that the snapshots will be stored FolderName string } // It creates a snapshot for the given test case, the snapshot will include the input and the output of the test case func MakeSnapshot(options *SnapshotOptions) { t := options.Testing testCaseName := options.TestCaseName input := options.Input output := options.Output kind := options.Kind folderName := "__snapshots__" if options.FolderName != "" { folderName = options.FolderName } snapshotName := RedactTestName(testCaseName) s := snaps.WithConfig( snaps.Filename(snapshotName), snaps.Dir(folderName), ) snapshot := "## Input\n\n```\n" snapshot += Dedent(input) snapshot += "\n```\n\n## Output\n\n" snapshot += "```" + outputKind[kind] + "\n" snapshot += Dedent(output) snapshot += "\n```" s.MatchSnapshot(t, snapshot) } ================================================ FILE: internal/token.go ================================================ // Copyright 2010 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package astro import ( "bytes" "errors" "fmt" "io" "strconv" "strings" "unicode" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/loc" "golang.org/x/net/html/atom" ) // A TokenType is the type of a Token. type TokenType uint32 const ( // ErrorToken means that an error occurred during tokenization. ErrorToken TokenType = iota // TextToken means a text node. TextToken // A StartTagToken looks like <a>. StartTagToken // An EndTagToken looks like </a>. EndTagToken // A SelfClosingTagToken tag looks like <br/>. 
SelfClosingTagToken // A CommentToken looks like <!--x-->. CommentToken // A DoctypeToken looks like <!DOCTYPE x> DoctypeToken // ASTRO EXTENSIONS // A FenceToken is the opening or closing --- of Frontmatter FrontmatterFenceToken // A StartExpressionToken looks like { and can contain StartExpressionToken // An EndExpressionToken looks like } EndExpressionToken ) // FrontmatterState tracks the open/closed state of Frontmatter. type FrontmatterState uint32 const ( FrontmatterInitial FrontmatterState = iota FrontmatterOpen FrontmatterClosed ) // AttributeType is the type of an Attribute type AttributeType uint32 func (t AttributeType) String() string { switch t { case QuotedAttribute: return "quoted" case EmptyAttribute: return "empty" case ExpressionAttribute: return "expression" case SpreadAttribute: return "spread" case ShorthandAttribute: return "shorthand" case TemplateLiteralAttribute: return "template-literal" } return "Invalid(" + strconv.Itoa(int(t)) + ")" } const ( QuotedAttribute AttributeType = iota EmptyAttribute ExpressionAttribute SpreadAttribute ShorthandAttribute TemplateLiteralAttribute ) // ErrBufferExceeded means that the buffering limit was exceeded. var ErrBufferExceeded = errors.New("max buffer exceeded") // String returns a string representation of the TokenType. 
func (t TokenType) String() string {
	switch t {
	case ErrorToken:
		return "Error"
	case TextToken:
		return "Text"
	case StartTagToken:
		return "StartTag"
	case EndTagToken:
		return "EndTag"
	case SelfClosingTagToken:
		return "SelfClosingTag"
	case CommentToken:
		return "Comment"
	case DoctypeToken:
		return "Doctype"
	case FrontmatterFenceToken:
		return "FrontmatterFence"
	case StartExpressionToken:
		return "StartExpression"
	case EndExpressionToken:
		return "EndExpression"
	}
	// Out-of-range values (future/bogus token types) render as Invalid(<n>).
	return "Invalid(" + strconv.Itoa(int(t)) + ")"
}

// String returns a human-readable name for a FrontmatterState
// ("Initial", "Open", "Closed"), used in debugging output.
func (fm FrontmatterState) String() string {
	switch fm {
	case FrontmatterInitial:
		return "Initial"
	case FrontmatterOpen:
		return "Open"
	case FrontmatterClosed:
		return "Closed"
	}
	return "Invalid(" + strconv.Itoa(int(fm)) + ")"
}

// An Attribute is an attribute namespace-key-value triple. Namespace is
// non-empty for foreign attributes like xlink, Key is alphabetic (and hence
// does not contain escapable characters like '&', '<' or '>'), and Val is
// unescaped (it looks like "a<b" rather than "a&lt;b").
//
// Namespace is only used by the parser, not the tokenizer.
type Attribute struct {
	Namespace string
	Key       string
	KeyLoc    loc.Loc // source location of Key
	Val       string
	ValLoc    loc.Loc // source location of Val
	Tokenizer *Tokenizer
	Type      AttributeType // quoted / empty / expression / spread / shorthand / template-literal
}

// Expression is an Astro extension: a `{...}` expression node carrying its
// raw source Data, any child Tokens produced inside it, and its location.
type Expression struct {
	Data     string
	Children []Token
	Loc      loc.Loc
}

// A Token consists of a TokenType and some Data (tag name for start and end
// tags, content for text, comments and doctypes). A tag Token may also contain
// a slice of Attributes. Data is unescaped for all Tokens (it looks like "a<b"
// rather than "a&lt;b"). For tag Tokens, DataAtom is the atom for Data, or
// zero if Data is not a known tag name.
type Token struct {
	Type     TokenType
	DataAtom atom.Atom
	Data     string
	Attr     []Attribute
	Loc      loc.Loc
}

// tagString returns a string representation of a tag Token's Data and Attr.
func (t Token) tagString() string {
	if len(t.Attr) == 0 {
		return t.Data
	}
	buf := bytes.NewBufferString(t.Data)
	for _, a := range t.Attr {
		buf.WriteByte(' ')
		// Each AttributeType round-trips to its Astro source syntax.
		switch a.Type {
		case QuotedAttribute:
			// k="v" (value HTML-escaped)
			buf.WriteString(a.Key)
			buf.WriteString(`="`)
			escape(buf, a.Val)
			buf.WriteByte('"')
		case EmptyAttribute:
			// bare key, e.g. `disabled`
			buf.WriteString(a.Key)
		case ExpressionAttribute:
			// k={expr} — expression body is NOT escaped (it is code, not text)
			buf.WriteString(a.Key)
			buf.WriteString(`={`)
			buf.WriteString(a.Val)
			buf.WriteByte('}')
		case TemplateLiteralAttribute:
			// k={`...`}
			buf.WriteString(a.Key)
			buf.WriteByte('=')
			buf.WriteByte('{')
			buf.WriteByte('`')
			escape(buf, a.Val)
			buf.WriteByte('`')
			buf.WriteByte('}')
		case ShorthandAttribute:
			// {k}
			buf.WriteByte('{')
			buf.WriteString(a.Key)
			buf.WriteByte('}')
		case SpreadAttribute:
			// {...k}
			buf.WriteString("{...")
			buf.WriteString(a.Key)
			buf.WriteByte('}')
		default:
			buf.WriteString(a.Key)
		}
	}
	return buf.String()
}

// String returns a string representation of the Token.
func (t Token) String() string {
	switch t.Type {
	case ErrorToken:
		return ""
	case TextToken:
		return EscapeString(t.Data)
	case StartTagToken:
		return "<" + t.tagString() + ">"
	case EndTagToken:
		return "</" + t.tagString() + ">"
	case SelfClosingTagToken:
		return "<" + t.tagString() + "/>"
	case CommentToken:
		return "<!--" + t.Data + "-->"
	case DoctypeToken:
		return "<!DOCTYPE " + t.Data + ">"
	case FrontmatterFenceToken:
		return "---"
	case StartExpressionToken:
		return "{"
	case EndExpressionToken:
		return "}"
	}
	return "Invalid(" + strconv.Itoa(int(t.Type)) + ")"
}

// A Tokenizer returns a stream of HTML Tokens.
type Tokenizer struct {
	// r is the source of the HTML text.
	r io.Reader
	// tt is the TokenType of the current token.
	tt TokenType
	// prevToken is the most recently emitted token; used e.g. to disambiguate
	// TypeScript generics from start tags in readStartTag.
	prevToken Token
	// fm tracks whether the Astro frontmatter fence is unopened/open/closed.
	fm FrontmatterState
	// err is the first error encountered during tokenization. It is possible
	// for tt != Error && err != nil to hold: this means that Next returned a
	// valid token but the subsequent Next call will return an error token.
	// For example, if the HTML text input was just "plain", then the first
	// Next call would set z.err to io.EOF but return a TextToken, and all
	// subsequent Next calls would return an ErrorToken.
	// err is never reset. Once it becomes non-nil, it stays non-nil.
	err error
	// buf[raw.Start:raw.End] holds the raw bytes of the current token.
	// buf[raw.End:] is buffered input that will yield future tokens.
	raw loc.Span
	buf []byte
	// buf[data.Start:data.End] holds the raw bytes of the current token's data:
	// a text token's text, a tag token's tag name, etc.
	data loc.Span
	// pendingAttr is the attribute key and value currently being tokenized.
	// When complete, pendingAttr is pushed onto attr. nAttrReturned is
	// incremented on each call to TagAttr.
	pendingAttr     [2]loc.Span
	pendingAttrType AttributeType
	attr            [][2]loc.Span
	attrTypes       []AttributeType
	// attrExpressionStack counts `{`/`}` nesting inside an attribute expression.
	attrExpressionStack int
	// attrTemplateLiteralStack tracks open backticks per expression nesting level.
	attrTemplateLiteralStack []int
	nAttrReturned            int
	dashCount                int
	// expressionStack is an array of counters tracking opening and closing
	// braces in nested expressions
	expressionStack []int
	// expressionElementStack tracks element names opened inside each expression.
	expressionElementStack     [][]string
	openBraceIsExpressionStart bool
	// rawTag is the "script" in "</script>" that closes the next token. If
	// non-empty, the subsequent call to Next will return a raw or RCDATA text
	// token: one that treats "<p>" as text instead of an element.
	// rawTag's contents are lower-cased.
	rawTag string
	// noExpressionTag is the "math" in "<math>". If non-empty, any instances
	// of "{" will be treated as raw text rather than an StartExpressionToken.
	// noExpressionTag's contents are lower-cased.
	noExpressionTag string
	// stringStartChar is the character that opened the last string: ', ", or `
	// stringStartChar byte
	// stringIsOpen will be true while in the context of a string
	// stringIsOpen bool
	// textIsRaw is whether the current text token's data is not escaped.
	textIsRaw bool
	// convertNUL is whether NUL bytes in the current token's data should
	// be converted into \ufffd replacement characters.
	convertNUL bool
	// allowCDATA is whether CDATA sections are allowed in the current context.
	allowCDATA bool
	// handler collects diagnostics (warnings/errors) emitted during tokenization.
	handler *handler.Handler
}

// AllowCDATA sets whether or not the tokenizer recognizes <![CDATA[foo]]> as
// the text "foo". The default value is false, which means to recognize it as
// a bogus comment "<!-- [CDATA[foo]] -->" instead.
//
// Strictly speaking, an HTML5 compliant tokenizer should allow CDATA if and
// only if tokenizing foreign content, such as MathML and SVG. However,
// tracking foreign-contentness is difficult to do purely in the tokenizer,
// as opposed to the parser, due to HTML integration points: an <svg> element
// can contain a <foreignObject> that is foreign-to-SVG but not foreign-to-
// HTML. For strict compliance with the HTML5 tokenization algorithm, it is the
// responsibility of the user of a tokenizer to call AllowCDATA as appropriate.
// In practice, if using the tokenizer without caring whether MathML or SVG
// CDATA is text or comments, such as tokenizing HTML to find all the anchor
// text, it is acceptable to ignore this responsibility.
func (z *Tokenizer) AllowCDATA(allowCDATA bool) {
	z.allowCDATA = allowCDATA
}

// NextIsNotRawText instructs the tokenizer that the next token should not be
// considered as 'raw text'. Some elements, such as script and title elements,
// normally require the next token after the opening tag to be 'raw text' that
// has no child elements. For example, tokenizing "<title>a<b>c</b>d</title>"
// yields a start tag token for "<title>", a text token for "a<b>c</b>d", and
// an end tag token for "</title>". There are no distinct start tag or end tag
// tokens for the "<b>" and "</b>".
//
// The only exception is <style>, which should be treated as raw text no
// matter what (handled in the conditional).
//
// This tokenizer implementation will generally look for raw text at the right
// times. Strictly speaking, an HTML5 compliant tokenizer should not look for
// raw text if in foreign content: <title> generally needs raw text, but a
// <title> inside an <svg> does not. Another example is that a <textarea>
// generally needs raw text, but a <textarea> is not allowed as an immediate
// child of a <select>; in normal parsing, a <textarea> implies </select>, but
// one cannot close the implicit element when parsing a <select>'s InnerHTML.
// Similarly to AllowCDATA, tracking the correct moment to override raw-text-
// ness is difficult to do purely in the tokenizer, as opposed to the parser.
// For strict compliance with the HTML5 tokenization algorithm, it is the
// responsibility of the user of a tokenizer to call NextIsNotRawText as
// appropriate. In practice, like AllowCDATA, it is acceptable to ignore this
// responsibility for basic usage.
//
// Note that this 'raw text' concept is different from the one offered by the
// Tokenizer.Raw method.
func (z *Tokenizer) NextIsNotRawText() {
	if z.rawTag != "style" {
		z.rawTag = ""
	}
}

// Err returns the error associated with the most recent ErrorToken token.
// This is typically io.EOF, meaning the end of tokenization.
func (z *Tokenizer) Err() error {
	if z.tt != ErrorToken {
		return nil
	}
	return z.err
}

// readByte returns the next byte from the input buffer.
// z.buf[z.raw.Start:z.raw.End] remains a contiguous byte
// slice that holds all the bytes read so far for the current token.
// Pre-condition: z.err == nil.
func (z *Tokenizer) readByte() byte {
	if z.raw.End >= len(z.buf) {
		z.err = io.EOF // note: io.EOF is the only “safe” error that is a signal for the compiler to exit cleanly
		return 0
	}
	x := z.buf[z.raw.End]
	z.raw.End++
	return x
}

// Buffered returns a slice containing data buffered but not yet tokenized.
func (z *Tokenizer) Buffered() []byte {
	return z.buf[z.raw.End:]
}

// skipWhiteSpace skips past any white space.
func (z *Tokenizer) skipWhiteSpace() {
	if z.err != nil {
		return
	}
	for {
		c := z.readByte()
		if z.err != nil {
			if z.err == io.EOF {
				return
			}
			// Non-EOF read failure: surface a diagnostic instead of looping.
			z.handler.AppendWarning(&loc.ErrorWithRange{
				Code: loc.WARNING_UNEXPECTED_CHARACTER,
				Text: fmt.Sprintf("Unexpected character in skipWhiteSpace: \"%v\"\n", string(c)),
				Range: loc.Range{
					Loc: loc.Loc{Start: z.raw.End - 1},
					Len: 1,
				},
			})
			return
		}
		if !unicode.IsSpace(rune(c)) {
			// Non-space byte: un-read it so the caller tokenizes it.
			z.raw.End--
			return
		}
	}
}

// readRawOrRCDATA reads until the next "</foo>", where "foo" is z.rawTag and
// is typically something like "script" or "textarea".
func (z *Tokenizer) readRawOrRCDATA() {
	// If <script /> or any raw tag, don't try to read any data
	if z.Token().Type == SelfClosingTagToken {
		z.data.End = z.raw.End
		z.rawTag = ""
		z.noExpressionTag = ""
		return
	}
	// <script> bodies get the full WHATWG escaped/double-escaped treatment.
	if z.rawTag == "script" {
		z.readScript()
		z.textIsRaw = true
		z.rawTag = ""
		z.noExpressionTag = ""
		return
	}
loop:
	for {
		c := z.readByte()
		if z.err != nil {
			if z.err == io.EOF {
				return
			}
			z.handler.AppendWarning(&loc.ErrorWithRange{
				Code: loc.WARNING_UNEXPECTED_CHARACTER,
				Text: fmt.Sprintf("Unexpected character in loop: \"%v\"\n", string(c)),
				Range: loc.Range{
					Loc: loc.Loc{Start: z.raw.End - 1},
					Len: 1,
				},
			})
			break loop
		}
		if c != '<' {
			continue loop
		}
		c = z.readByte()
		if z.err != nil {
			break loop
		}
		if c != '/' {
			// "<" not followed by "/": treat "<" as text and re-read this byte.
			z.raw.End--
			continue loop
		}
		// "</" seen: stop if it closes z.rawTag (readRawEndTag backs up on success).
		if z.readRawEndTag() || z.err != nil {
			break loop
		}
	}
	z.data.End = z.raw.End
	// A textarea's or title's RCDATA can contain escaped entities.
	z.textIsRaw = z.rawTag != "textarea" && z.rawTag != "title"
	z.rawTag = ""
}

// readRawEndTag attempts to read a tag like "</foo>", where "foo" is z.rawTag.
// If it succeeds, it backs up the input position to reconsume the tag and
// returns true. Otherwise it returns false. The opening "</" has already been
// consumed.
func (z *Tokenizer) readRawEndTag() bool {
	// Match rawTag case-insensitively (ASCII only), byte by byte.
	for i := 0; i < len(z.rawTag); i++ {
		c := z.readByte()
		if z.err != nil {
			return false
		}
		if c != z.rawTag[i] && c != z.rawTag[i]-('a'-'A') {
			z.raw.End--
			return false
		}
	}
	c := z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return false
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in readRawEndTag: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return false
	}
	switch c {
	case ' ', '\n', '\r', '\t', '\f', '/', '>':
		// The 3 is 2 for the leading "</" plus 1 for the trailing character c.
		z.raw.End -= 3 + len(z.rawTag)
		return true
	}
	z.raw.End--
	return false
}

// readScript reads until the next </script> tag, following the byzantine
// rules for escaping/hiding the closing tag.
// The labels below mirror the state names of the WHATWG HTML tokenizer's
// "script data" states; each state reports a diagnostic on non-EOF errors.
func (z *Tokenizer) readScript() {
	defer func() {
		z.data.End = z.raw.End
	}()
	var c byte

scriptData:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptData: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if c == '<' {
		goto scriptDataLessThanSign
	}
	goto scriptData

scriptDataLessThanSign:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataLessThanSign: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '/':
		goto scriptDataEndTagOpen
	case '!':
		goto scriptDataEscapeStart
	}
	z.raw.End--
	goto scriptData

scriptDataEndTagOpen:
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEndTagOpen: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if z.readRawEndTag() {
		return
	}
	goto scriptData

scriptDataEscapeStart:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapeStart: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if c == '-' {
		goto scriptDataEscapeStartDash
	}
	z.raw.End--
	goto scriptData

scriptDataEscapeStartDash:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapeStartDash: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if c == '-' {
		goto scriptDataEscapedDashDash
	}
	z.raw.End--
	goto scriptData

scriptDataEscaped:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscaped: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataEscapedDash
	case '<':
		goto scriptDataEscapedLessThanSign
	}
	goto scriptDataEscaped

scriptDataEscapedDash:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapedDash: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataEscapedDashDash
	case '<':
		goto scriptDataEscapedLessThanSign
	}
	goto scriptDataEscaped

scriptDataEscapedDashDash:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapedDashDash: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataEscapedDashDash
	case '<':
		goto scriptDataEscapedLessThanSign
	case '>':
		goto scriptData
	}
	goto scriptDataEscaped

scriptDataEscapedLessThanSign:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapedLessThanSign: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if c == '/' {
		goto scriptDataEscapedEndTagOpen
	}
	if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' {
		goto scriptDataDoubleEscapeStart
	}
	z.raw.End--
	goto scriptData

scriptDataEscapedEndTagOpen:
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataEscapedEndTagOpen: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if z.readRawEndTag() || z.err != nil {
		return
	}
	goto scriptDataEscaped

scriptDataDoubleEscapeStart:
	z.raw.End--
	// Check for a literal "script" (either case) to enter double-escaped mode.
	for i := 0; i < len("script"); i++ {
		c = z.readByte()
		if z.err != nil {
			if z.err == io.EOF {
				return
			}
			z.handler.AppendWarning(&loc.ErrorWithRange{
				Code: loc.WARNING_UNEXPECTED_CHARACTER,
				Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscapeStart: %v\n", string(c)),
				Range: loc.Range{
					Loc: loc.Loc{Start: z.raw.End - 1},
					Len: 1,
				},
			})
			return
		}
		if c != "script"[i] && c != "SCRIPT"[i] {
			z.raw.End--
			goto scriptDataEscaped
		}
	}
	c = z.readByte()
	if z.err != nil {
		return
	}
	switch c {
	case ' ', '\n', '\r', '\t', '\f', '/', '>':
		goto scriptDataDoubleEscaped
	}
	z.raw.End--
	goto scriptDataEscaped

scriptDataDoubleEscaped:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscaped: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataDoubleEscapedDash
	case '<':
		goto scriptDataDoubleEscapedLessThanSign
	}
	goto scriptDataDoubleEscaped

scriptDataDoubleEscapedDash:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscapedDash: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataDoubleEscapedDashDash
	case '<':
		goto scriptDataDoubleEscapedLessThanSign
	}
	goto scriptDataDoubleEscaped

scriptDataDoubleEscapedDashDash:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscapedDashDash: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	switch c {
	case '-':
		goto scriptDataDoubleEscapedDashDash
	case '<':
		goto scriptDataDoubleEscapedLessThanSign
	case '>':
		goto scriptData
	}
	goto scriptDataDoubleEscaped

scriptDataDoubleEscapedLessThanSign:
	c = z.readByte()
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscapedLessThanSign: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	if c == '/' {
		goto scriptDataDoubleEscapeEnd
	}
	z.raw.End--
	goto scriptDataDoubleEscaped

scriptDataDoubleEscapeEnd:
	if z.readRawEndTag() {
		// The end tag belongs to the inner (double-escaped) script text;
		// re-consume it and fall back to the escaped state.
		z.raw.End += len("</script>")
		goto scriptDataEscaped
	}
	if z.err != nil {
		if z.err == io.EOF {
			return
		}
		z.handler.AppendWarning(&loc.ErrorWithRange{
			Code: loc.WARNING_UNEXPECTED_CHARACTER,
			Text: fmt.Sprintf("Unexpected character in scriptDataDoubleEscapeEnd: %v\n", string(c)),
			Range: loc.Range{
				Loc: loc.Loc{Start: z.raw.End - 1},
				Len: 1,
			},
		})
		return
	}
	goto scriptDataDoubleEscaped
}

// readHTMLComment reads the next comment token starting with "<!--". The opening
// "<!--" has already been consumed.
func (z *Tokenizer) readHTMLComment() {
	start := z.raw.End
	z.data.Start = start
	defer func() {
		if z.data.End < z.data.Start {
			// It's a comment with no data, like <!-->.
			z.data.End = z.data.Start
		}
	}()
	// dashCount starts at 2 so an immediate ">" closes the comment ("<!-->").
	for dashCount := 2; ; {
		c := z.readByte()
		if z.err != nil {
			if z.err == io.EOF {
				z.handler.AppendWarning(&loc.ErrorWithRange{
					Code: loc.WARNING_UNTERMINATED_HTML_COMMENT,
					Text: `Unterminated comment`,
					Range: loc.Range{
						Loc: loc.Loc{Start: start},
						Len: 4,
					},
				})
			}
			// Ignore up to two dashes at EOF.
			if dashCount > 2 {
				dashCount = 2
			}
			z.data.End = z.raw.End - dashCount
			return
		}
		switch c {
		case '-':
			dashCount++
			continue
		case '>':
			if dashCount >= 2 {
				z.data.End = z.raw.End - len("-->")
				return
			}
		case '!':
			// "--!>" is also accepted as a (malformed) comment close.
			if dashCount >= 2 {
				c = z.readByte()
				if z.err != nil {
					z.data.End = z.raw.End
					return
				}
				if c == '>' {
					z.data.End = z.raw.End - len("--!>")
					return
				}
			}
		}
		dashCount = 0
	}
}

// readUntilCloseAngle reads until the next ">".
func (z *Tokenizer) readUntilCloseAngle() {
	z.data.Start = z.raw.End
	for {
		c := z.readByte()
		if z.err != nil {
			z.data.End = z.raw.End
			return
		}
		if c == '>' {
			z.data.End = z.raw.End - len(">")
			return
		}
	}
}

// readString reads until a JavaScript string is closed.
func (z *Tokenizer) readString(c byte) {
	switch c {
	// single quote (ends on newline)
	case '\'':
		z.readUntilChar([]byte{'\'', '\r', '\n'})
	// double quote (ends on newline)
	case '"':
		z.readUntilChar([]byte{'"', '\r', '\n'})
	// template literal
	case '`':
		// Note that we DO NOT have to handle `${}` here because our expression
		// behavior already handles `{}` and `z.readTagAttrExpression()` handles
		// template literals separately.
		z.readUntilChar([]byte{'`'})
	}
}

// generic utility to look ahead until the first char is encountered from given splice
func (z *Tokenizer) readUntilChar(chars []byte) {
find_next:
	for {
		c := z.readByte()
		// fail on error
		if z.err != nil {
			z.data.End = z.raw.End - 1
			return
		}
		// handle escape char \
		if c == '\\' {
			z.raw.End++
			// NOTE(review): this indexes z.data.Start (the start of the token's
			// data), not the byte just consumed after the backslash
			// (z.buf[z.raw.End-1]). It looks suspicious — confirm intent before
			// changing, since escaped-terminator handling depends on it.
			c = z.buf[z.data.Start : z.data.Start+1][0]
			// if this is a match but it’s escaped, skip and move to the next char
			for _, v := range chars {
				if c == v {
					z.raw.End++
					continue find_next
				}
			}
		}
		// match found!
		for _, v := range chars {
			if c == v {
				z.data.End = z.raw.End
				return
			}
		}
	}
}

// read RegExp expressions and comments (starting from '/' byte)
func (z *Tokenizer) readCommentOrRegExp(boundaryChars []byte) {
	c := z.readByte()
	// find next character after '/' to know how to handle it
	switch c {
	// single-line comment (ends on newline)
	case '/':
		z.readUntilChar([]byte{'\r', '\n'})
	// multi-line comment
	case '*':
		start := z.data.Start
		prev := c
		for {
			c = z.readByte()
			if z.err != nil {
				if z.err == io.EOF {
					z.handler.AppendError(&loc.ErrorWithRange{
						Code: loc.ERROR_UNTERMINATED_JS_COMMENT,
						Text: `Unterminated comment`,
						Range: loc.Range{
							Loc: loc.Loc{Start: start},
							Len: 2,
						},
					})
				}
				return
			}
			// look for "*/"
			if prev == '*' && c == '/' {
				z.data.End = z.raw.End
				return
			}
			prev = c
		}
	// RegExp
	default:
		// Not a comment: un-read and scan to the regex/line terminator or any
		// caller-supplied boundary character.
		z.raw.End--
		z.readUntilChar(append([]byte{'/', '\r', '\n'}, boundaryChars...))
	}
}

// readMarkupDeclaration reads the next token starting with "<!". It might be
// a "<!--comment-->", a "<!DOCTYPE foo>", a "<![CDATA[section]]>" or
// "<!a bogus comment". The opening "<!" has already been consumed.
func (z *Tokenizer) readMarkupDeclaration() TokenType {
	z.data.Start = z.raw.End
	var c [2]byte
	for i := 0; i < 2; i++ {
		c[i] = z.readByte()
		if z.err != nil {
			z.data.End = z.raw.End
			return CommentToken
		}
	}
	if c[0] == '-' && c[1] == '-' {
		z.readHTMLComment()
		return CommentToken
	}
	// Not a comment: push the two peeked bytes back before trying alternatives.
	z.raw.End -= 2
	if z.readDoctype() {
		return DoctypeToken
	}
	if z.allowCDATA && z.readCDATA() {
		z.convertNUL = true
		return TextToken
	}
	// It's a bogus comment.
	z.readUntilCloseAngle()
	return CommentToken
}

// readDoctype attempts to read a doctype declaration and returns true if
// successful. The opening "<!" has already been consumed.
func (z *Tokenizer) readDoctype() bool {
	const s = "DOCTYPE"
	for i := 0; i < len(s); i++ {
		c := z.readByte()
		if z.err != nil {
			z.data.End = z.raw.End
			return false
		}
		// Case-insensitive match against "DOCTYPE".
		if c != s[i] && c != s[i]+('a'-'A') {
			// Back up to read the fragment of "DOCTYPE" again.
			z.raw.End = z.data.Start
			return false
		}
	}
	if z.skipWhiteSpace(); z.err != nil {
		z.data.Start = z.raw.End
		z.data.End = z.raw.End
		return true
	}
	z.readUntilCloseAngle()
	return true
}

// readCDATA attempts to read a CDATA section and returns true if
// successful. The opening "<!" has already been consumed.
func (z *Tokenizer) readCDATA() bool {
	const s = "[CDATA["
	for i := 0; i < len(s); i++ {
		c := z.readByte()
		if z.err != nil {
			z.data.End = z.raw.End
			return false
		}
		if c != s[i] {
			// Back up to read the fragment of "[CDATA[" again.
			z.raw.End = z.data.Start
			return false
		}
	}
	z.data.Start = z.raw.End
	// brackets counts trailing ']' bytes; "]]>" (brackets >= 2 then '>') closes.
	brackets := 0
	for {
		c := z.readByte()
		if z.err != nil {
			z.data.End = z.raw.End
			return true
		}
		switch c {
		case ']':
			brackets++
		case '>':
			if brackets >= 2 {
				z.data.End = z.raw.End - len("]]>")
				return true
			}
			brackets = 0
		default:
			brackets = 0
		}
	}
}

// startTagIn returns whether the start tag in z.buf[z.data.Start:z.data.End]
// case-insensitively matches any element of ss.
// NOTE(review): the comparison below is byte-exact (case-SENSITIVE), despite
// this upstream comment — presumably callers pass lower-cased names and rely
// on lower-cased tag text; confirm before relying on case-insensitivity.
func (z *Tokenizer) startTagIn(ss ...string) bool { loop: for _, s := range ss { if z.data.End-z.data.Start != len(s) { continue loop } for i := 0; i < len(s); i++ { c := z.buf[z.data.Start+i] if c != s[i] { continue loop } } return true } return false } func (z *Tokenizer) hasAttribute(s string) bool { for i := len(z.attr) - 1; i >= 0; i-- { x := z.attr[i] key := z.buf[x[0].Start:x[0].End] if string(key) == s { return true } } return false } // readStartTag reads the next start tag token. The opening "<a" has already // been consumed, where 'a' means anything in [A-Za-z]. func (z *Tokenizer) readStartTag() TokenType { z.readTag(true) // Several tags flag the tokenizer's next token as raw. c, raw, noExpression := z.buf[z.data.Start], false, false switch c { case 'i': raw = z.startTagIn("iframe") case 'n': raw = z.startTagIn("noembed", "noframes") case 'm': noExpression = z.startTagIn("math") case 'p': raw = z.startTagIn("plaintext") case 's': raw = z.startTagIn("script", "style") case 't': raw = z.startTagIn("textarea", "title") case 'x': raw = z.startTagIn("xmp") } if !raw { raw = z.hasAttribute("is:raw") } if raw { z.rawTag = string(z.buf[z.data.Start:z.data.End]) } if noExpression { z.noExpressionTag = string(z.buf[z.data.Start:z.data.End]) z.openBraceIsExpressionStart = false } // HTML void tags list: https://www.w3.org/TR/2011/WD-html-markup-20110113/syntax.html#syntax-elements // Also look for a self-closing token that's not in the list (e.g. 
"<svg><path/></svg>") if z.startTagIn("area", "base", "br", "col", "command", "embed", "hr", "img", "input", "keygen", "link", "meta", "param", "source", "track", "wbr") || z.err == nil && z.buf[z.raw.End-2] == '/' { // Reset tokenizer state for self-closing elements z.rawTag = "" return SelfClosingTagToken } // Special handling for selectedcontent - it's void but can have a closing tag in HTML if z.startTagIn("selectedcontent") && z.err == nil && z.buf[z.raw.End-2] == '/' { // Only treat as self-closing if it actually has /> z.rawTag = "" return SelfClosingTagToken } // Handle TypeScript Generics if len(z.expressionElementStack) > 0 && len(z.expressionElementStack[len(z.expressionElementStack)-1]) == 0 { if z.prevToken.Type == TextToken { tag := z.buf[z.data.Start:z.data.End] a := atom.Lookup(tag) // We can be certain this is a start tag if we match an HTML tag, Fragment, or <> if a.String() != "" || bytes.Equal(tag, []byte("Fragment")) || bytes.Equal(tag, []byte{}) { return StartTagToken } text := z.prevToken.Data originalLen := len(text) // If this "StartTagToken" does not include any spaces between it and the end of the expression // we can roughly assume it is a TypeScript generic rather than an element. Rough but it works! if len(text) != 0 && len(strings.TrimRightFunc(text, unicode.IsSpace)) == originalLen { return TextToken } } } return StartTagToken } // readUnclosedTag reads up until an unclosed tag is implicitly closed. // Without this function, the tokenizer could get stuck in infinite loops if a // user is in the middle of typing func (z *Tokenizer) readUnclosedTag() bool { buf := z.buf[z.data.Start:] var close int if z.fm == FrontmatterOpen { close = strings.Index(string(buf), "---") if close != -1 { buf = buf[0:close] } } close = bytes.Index(buf, []byte{'>'}) if close != -1 { buf = buf[0:close] } if close == -1 { // We can't find a closing tag... 
for i := 0; i < len(buf); i++ { c := z.readByte() if z.err != nil { z.data.End = z.raw.End return true } switch c { case ' ', '\n', '\r', '\t', '\f': // Safely read up until a whitespace character z.data.End = z.raw.End return true } } return false } return false } // readTag reads the next tag token and its attributes. If saveAttr, those // attributes are saved in z.attr, otherwise z.attr is set to an empty slice. // The opening "<a" or "</a" has already been consumed, where 'a' means anything // in [A-Za-z]. func (z *Tokenizer) readTag(saveAttr bool) { z.pendingAttrType = QuotedAttribute z.attr = z.attr[:0] z.attrTypes = z.attrTypes[:0] z.attrExpressionStack = 0 z.attrTemplateLiteralStack = make([]int, 0) z.nAttrReturned = 0 // Read the tag name and attribute key/value pairs. z.readTagName() if z.skipWhiteSpace(); z.err != nil { if z.err == io.EOF { start := z.prevToken.Loc.Start end := z.data.Start z.handler.AppendWarning(&loc.ErrorWithRange{ Code: loc.WARNING_UNCLOSED_HTML_TAG, Text: `Unclosed tag`, Range: loc.Range{ Loc: loc.Loc{Start: start}, Len: end - start, }, }) } return } for { c := z.readByte() if z.err != nil || c == '>' { break } z.raw.End-- z.readTagAttrKey() z.readTagAttrVal() // Save pendingAttr if saveAttr and that attribute has a non-empty key. if saveAttr && z.pendingAttr[0].Start != z.pendingAttr[0].End { z.attr = append(z.attr, z.pendingAttr) z.attrTypes = append(z.attrTypes, z.pendingAttrType) // Warn for common mistakes attr := z.attr[len(z.attr)-1] // Possible ...spread attribute without wrapping expression if attr[0].End-attr[0].Start > 3 { text := string(z.buf[attr[0].Start:attr[0].End]) if len(strings.TrimSpace(text)) > 3 && strings.TrimSpace(text)[0:3] == "..." { z.handler.AppendWarning(&loc.ErrorWithRange{ Code: loc.WARNING_INVALID_SPREAD, Text: fmt.Sprintf(`Invalid spread attribute. 
Did you mean %s?`, fmt.Sprintf("`{%s}`", text)), Range: loc.Range{ Loc: loc.Loc{Start: attr[0].Start}, Len: len(text), }, }) } } } if z.skipWhiteSpace(); z.err != nil { break } } } // readTagName sets z.data to the "div" in "<div k=v>". The reader (z.raw.End) // is positioned such that the first byte of the tag name (the "d" in "<div") // has already been consumed. func (z *Tokenizer) readTagName() { z.data.Start = z.raw.End - 1 for { c := z.readByte() if z.err != nil { z.data.End = z.raw.End return } switch c { case ' ', '\n', '\r', '\t', '\f': z.data.End = z.raw.End - 1 return case '/', '>': z.raw.End-- z.data.End = z.raw.End return } } } // readTagAttrKey sets z.pendingAttr[0] to the "k" in "<div k=v>". // Precondition: z.err == nil. func (z *Tokenizer) readTagAttrKey() { z.pendingAttr[0].Start = z.raw.End z.pendingAttrType = QuotedAttribute for { c := z.readByte() if z.err != nil { z.pendingAttr[0].End = z.raw.End return } switch c { case '{': z.pendingAttr[0].Start = z.raw.End z.pendingAttrType = ShorthandAttribute z.attrExpressionStack = 1 z.attrTemplateLiteralStack = append(z.attrTemplateLiteralStack, 0) z.readTagAttrExpression() pendingAttr := z.buf[z.pendingAttr[0].Start:] if trimmed := strings.TrimSpace(string(pendingAttr)); len(trimmed) > 3 { if trimmed[0:3] == "..." { z.pendingAttr[0].Start += strings.Index(string(pendingAttr), "...") + 3 z.pendingAttrType = SpreadAttribute } } continue case ' ', '\n', '\r', '\t', '\f', '/': if z.pendingAttrType == SpreadAttribute || z.pendingAttrType == ShorthandAttribute { z.pendingAttr[0].End = z.raw.End - 2 } else { z.pendingAttr[0].End = z.raw.End - 1 } return case '=', '>': z.raw.End-- if z.pendingAttrType == SpreadAttribute || z.pendingAttrType == ShorthandAttribute { z.pendingAttr[0].End = z.raw.End - 1 } else { z.pendingAttr[0].End = z.raw.End } return } } } // readTagAttrVal sets z.pendingAttr[1] to the "v" in "<div k=v>". 
func (z *Tokenizer) readTagAttrVal() {
	z.pendingAttr[1].Start = z.raw.End
	z.pendingAttr[1].End = z.raw.End
	if z.skipWhiteSpace(); z.err != nil {
		return
	}
	c := z.readByte()
	if z.err != nil {
		return
	}
	if c != '=' {
		// No '=': the attribute has no value. A plain key downgrades from the
		// default QuotedAttribute to EmptyAttribute; shorthand/spread keep
		// their type. The byte is un-consumed for the caller.
		if z.pendingAttrType == QuotedAttribute {
			z.pendingAttrType = EmptyAttribute
		}
		z.raw.End--
		return
	}
	if z.skipWhiteSpace(); z.err != nil {
		return
	}
	quote := z.readByte()
	if z.err != nil {
		return
	}
	switch quote {
	case '>':
		// `k=>`: treat as no value; un-consume '>' so the tag loop ends.
		z.raw.End--
		return
	case '\'', '"':
		// Quoted value: scan until the matching quote.
		z.pendingAttr[1].Start = z.raw.End
		z.pendingAttrType = QuotedAttribute
		for {
			c := z.readByte()
			if z.err != nil {
				if z.err == io.EOF {
					// rescan, closing any potentially unterminated quoted attribute values
					for i := z.pendingAttr[1].Start; i < z.raw.End; i++ {
						c := z.buf[i]
						if unicode.IsSpace(rune(c)) || c == '/' || c == '>' {
							z.pendingAttr[1].End = i
							break
						}
						if i == z.raw.End-1 {
							z.pendingAttr[1].End = i
							break
						}
					}
					// NOTE(review): Len here is the absolute offset z.raw.End,
					// not (z.raw.End - z.data.Start) — looks like it may
					// over-report the range length; confirm against the
					// diagnostics consumer before changing.
					z.handler.AppendError(&loc.ErrorWithRange{
						Code: loc.ERROR_UNTERMINATED_STRING,
						Text: `Unterminated quoted attribute`,
						Range: loc.Range{
							Loc: loc.Loc{Start: z.data.Start},
							Len: z.raw.End,
						},
					})
					return
				}
				z.pendingAttr[1].End = z.raw.End
				return
			}
			if c == quote {
				// Closing quote found; value excludes the quote itself.
				z.pendingAttr[1].End = z.raw.End - 1
				return
			}
		}
	case '`':
		// Template-literal value: same scan as quoted, different attr type.
		z.pendingAttr[1].Start = z.raw.End
		z.pendingAttrType = TemplateLiteralAttribute
		for {
			c := z.readByte()
			if z.err != nil {
				if z.err == io.EOF {
					// rescan, closing any potentially unterminated attribute values
					for i := z.pendingAttr[1].Start; i < z.raw.End; i++ {
						c := z.buf[i]
						if unicode.IsSpace(rune(c)) || c == '/' || c == '>' {
							z.pendingAttr[1].End = i
							break
						}
						if i == z.raw.End-1 {
							z.pendingAttr[1].End = i
							break
						}
					}
					z.handler.AppendError(&loc.ErrorWithRange{
						Code: loc.ERROR_UNTERMINATED_STRING,
						Text: `Unterminated template literal attribute`,
						Range: loc.Range{
							Loc: loc.Loc{Start: z.data.Start},
							Len: z.raw.End,
						},
					})
					return
				}
				z.pendingAttr[1].End = z.raw.End
				return
			}
			if c == quote {
				z.pendingAttr[1].End = z.raw.End - 1
				return
			}
		}
	case '{':
		// Expression value `k={expr}`: delegate to the expression scanner and
		// exclude the closing '}' from the value.
		z.pendingAttr[1].Start = z.raw.End
		z.pendingAttrType = ExpressionAttribute
		z.attrExpressionStack = 1
		z.attrTemplateLiteralStack = append(z.attrTemplateLiteralStack, 0)
		z.readTagAttrExpression()
		z.pendingAttr[1].End = z.raw.End - 1
		return
	default:
		// Unquoted value: runs until whitespace or '>'.
		z.pendingAttr[1].Start = z.raw.End - 1
		z.pendingAttrType = QuotedAttribute
		for {
			c := z.readByte()
			if z.err != nil {
				z.pendingAttr[1].End = z.raw.End
				return
			}
			switch c {
			case ' ', '\n', '\r', '\t', '\f':
				z.pendingAttr[1].End = z.raw.End - 1
				return
			case '>':
				z.raw.End--
				z.pendingAttr[1].End = z.raw.End
				return
			}
		}
	}
}

// allTagAttrExpressionsClosed reports whether every tracked template-literal
// nesting depth is back to zero (i.e. no unbalanced backticks remain).
func (z *Tokenizer) allTagAttrExpressionsClosed() bool {
	for i := len(z.attrTemplateLiteralStack); i > 0; i-- {
		item := z.attrTemplateLiteralStack[i-1]
		if item != 0 {
			return false
		}
	}
	return true
}

// readTagAttrExpression consumes a `{...}` attribute expression, balancing
// nested braces and tracking template literals (so that braces inside
// `` `${...}` `` and quotes/comments/regexps do not terminate the expression).
// z.attrExpressionStack is the current brace depth; z.attrTemplateLiteralStack
// holds a backtick-nesting counter per brace depth.
func (z *Tokenizer) readTagAttrExpression() {
	if z.err != nil {
		return
	}
	for {
		c := z.readByte()
		if z.err != nil {
			return
		}
		switch c {
		case '`':
			// Toggle the template-literal counter at the current brace depth.
			current := 0
			if len(z.attrTemplateLiteralStack) >= z.attrExpressionStack {
				current = z.attrTemplateLiteralStack[z.attrExpressionStack-1]
			}
			if current > 0 {
				z.attrTemplateLiteralStack[z.attrExpressionStack-1]--
			} else {
				z.attrTemplateLiteralStack[z.attrExpressionStack-1]++
			}
		// Handle comments, strings within attrs
		case '/', '"', '\'':
			// NOTE(review): this first index is not guarded by the
			// len(...) >= z.attrExpressionStack check used everywhere else in
			// this function — verify it cannot be reached with an empty stack.
			if z.attrTemplateLiteralStack[z.attrExpressionStack-1] != 0 && c == '/' {
				continue
			}
			inTemplateLiteral := len(z.attrTemplateLiteralStack) >= z.attrExpressionStack && z.attrTemplateLiteralStack[z.attrExpressionStack-1] > 0
			if inTemplateLiteral {
				continue
			}
			end := z.data.End
			if c == '/' {
				// Also stop when we hit a '}' character (end of attribute expression)
				z.readCommentOrRegExp([]byte{'}'})
				// If we exit on a '}', ignore the final character here
				lastChar := z.buf[z.data.End-1 : z.data.End][0]
				if lastChar == '}' {
					z.data.End--
				}
			} else {
				z.readString(c)
			}
			// Restore z.data.End: the sub-scanners move it, but this function
			// only needs the raw cursor advanced.
			z.raw.End = z.data.End
			z.data.End = end
		case '{':
			previousChar := z.buf[z.raw.End-2]
			inTemplateLiteral := len(z.attrTemplateLiteralStack) >= z.attrExpressionStack && z.attrTemplateLiteralStack[z.attrExpressionStack-1] > 0
			// A '{' opens a new depth unless it is literal text inside a
			// template literal (the `${` interpolation form still opens one).
			if !inTemplateLiteral || previousChar == '$' {
				z.attrExpressionStack++
				z.attrTemplateLiteralStack = append(z.attrTemplateLiteralStack, 0)
			}
		case '}':
			inTemplateLiteral := len(z.attrTemplateLiteralStack) >= z.attrExpressionStack && z.attrTemplateLiteralStack[z.attrExpressionStack-1] > 0
			if !inTemplateLiteral {
				z.attrExpressionStack--
				// Done only when all braces AND all backticks are balanced.
				if z.attrExpressionStack == 0 && z.allTagAttrExpressionsClosed() {
					return
				}
			}
		}
	}
}

// Loc returns the location (start offset) of the current token's data.
func (z *Tokenizer) Loc() loc.Loc {
	return loc.Loc{Start: z.data.Start}
}

// An expression boundary means the next tokens should be treated as a JS expression
// (_do_ handle strings, comments, regexp, etc) rather than as plain text
func (z *Tokenizer) isAtExpressionBoundary() bool {
	if len(z.expressionStack) == 0 {
		return false
	}
	return len(z.expressionElementStack[len(z.expressionElementStack)-1]) == 0
}

// trackExpressionElementStack records start/end/self-closing tags seen while
// inside a `{...}` expression, so the tokenizer knows whether it is currently
// inside an element (plain text mode) or back at the expression level
// (JS mode). Runs via defer at the end of every Next() call.
func (z *Tokenizer) trackExpressionElementStack() {
	if len(z.expressionStack) == 0 {
		return
	}
	i := len(z.expressionElementStack) - 1
	if z.tt == StartTagToken {
		// Push the opened tag's name onto the innermost element stack.
		z.expressionElementStack[i] = append(z.expressionElementStack[i], string(z.buf[z.data.Start:z.data.End]))
	} else if z.tt == EndTagToken {
		stack := z.expressionElementStack[i]
		if len(stack) > 0 {
			// Pop the matching open tag (searched from the top down).
			for j := 1; j < len(stack)+1; j++ {
				tok := stack[len(stack)-j]
				if tok == string(z.buf[z.data.Start:z.data.End]) {
					// When stack is balanced, reset `openBraceIsExpressionStart`
					if len(stack) == 1 {
						z.expressionElementStack[i] = make([]string, 0)
						z.openBraceIsExpressionStart = false
					} else {
						z.expressionElementStack[i] = stack[:len(stack)-1]
					}
				}
			}
		}
	} else if z.tt == SelfClosingTagToken {
		stack := z.expressionElementStack[i]
		if len(stack) == 0 {
			// Only switch out of this mode if we're not in an active stack
			z.openBraceIsExpressionStart = false
		}
	}
}

// Next scans the next token and returns its type.
// Next is a state machine with four modes, linked by gotos:
//   - loop:                     ordinary HTML scanning
//   - frontmatter_loop:         inside/before the `---` frontmatter fences
//   - raw_with_expression_loop: raw text that may still contain `{}` (title/textarea)
//   - expression_loop:          inside a `{...}` JS expression
func (z *Tokenizer) Next() TokenType {
	z.prevToken = z.Token()
	z.raw.Start = z.raw.End
	z.data.Start = z.raw.End
	z.data.End = z.raw.End
	// Keep the element stack in sync with whichever token we return.
	defer z.trackExpressionElementStack()
	if z.rawTag != "" {
		if z.rawTag == "plaintext" {
			// Read everything up to EOF.
			for z.err == nil {
				z.readByte()
			}
			z.data.End = z.raw.End
			z.textIsRaw = true
		} else if z.rawTag == "title" || z.rawTag == "textarea" {
			// These raw elements may still contain `{expressions}`.
			goto raw_with_expression_loop
		} else {
			z.readRawOrRCDATA()
		}
		if z.data.End > z.data.Start {
			z.tt = TextToken
			z.convertNUL = true
			return z.tt
		}
	}
	z.textIsRaw = false
	z.convertNUL = false
	if z.fm != FrontmatterClosed {
		goto frontmatter_loop
	}
	if z.isAtExpressionBoundary() {
		goto expression_loop
	}

loop:
	for {
		c := z.readByte()
		if z.err != nil {
			break loop
		}
		var tokenType TokenType
		if c == '{' || c == '}' {
			// Flush any accumulated text before switching to expression mode.
			if x := z.raw.End - len("{"); z.raw.Start < x {
				z.raw.End = x
				z.data.End = x
				z.tt = TextToken
				return z.tt
			}
			z.raw.End--
			goto expression_loop
		}
		if c == '-' && z.fm != FrontmatterClosed {
			z.raw.End--
			goto frontmatter_loop
		}
		if c != '<' {
			continue loop
		}
		if z.fm == FrontmatterOpen {
			z.raw.End--
			goto frontmatter_loop
		}
		// Check if the '<' we have just read is part of a tag, comment
		// or doctype. If not, it's part of the accumulated text token.
		c = z.readByte()
		if z.err != nil {
			break loop
		}
		z.openBraceIsExpressionStart = z.noExpressionTag == ""
		// Empty <> Fragment start tag
		if c == '>' {
			if x := z.raw.End - len("<>"); z.raw.Start < x {
				z.raw.End = x
				z.data.End = x
				z.tt = TextToken
				return z.tt
			}
			z.tt = StartTagToken
			return z.tt
		}
		switch {
		case 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z':
			tokenType = StartTagToken
		case c == '/':
			tokenType = EndTagToken
		case c == '!' || c == '?':
			// We use CommentToken to mean any of "<!--actual comments-->",
			// "<!DOCTYPE declarations>" and "<?xml processing instructions?>".
			tokenType = CommentToken
		default:
			raw := z.Raw()
			// Error: encountered an attempted use of <> syntax with attributes, like `< slot="named">Hello world!</>`
			if len(raw) > 1 && unicode.IsSpace(rune(raw[0])) {
				element := bytes.Split(z.Buffered(), []byte{'>'})
				incorrect := fmt.Sprintf("< %s>", element[0])
				correct := fmt.Sprintf("<Fragment %s>", element[0])
				z.handler.AppendError(&loc.ErrorWithRange{
					Code:  loc.ERROR_FRAGMENT_SHORTHAND_ATTRS,
					Text:  `Unable to assign attributes when using <> Fragment shorthand syntax!`,
					Range: loc.Range{Loc: loc.Loc{Start: z.raw.End - 2}, Len: 3 + len(element[0])},
					Hint:  fmt.Sprintf("To fix this, please change %s to use the longhand Fragment syntax: %s", incorrect, correct),
				})
			}
			// Reconsume the current character.
			z.raw.End--
			continue
		}
		// We have a non-text token, but we might have accumulated some text
		// before that. If so, we return the text first, and return the non-
		// text token on the subsequent call to Next.
		if x := z.raw.End - len("<a"); z.raw.Start < x {
			z.raw.End = x
			z.data.End = x
			z.tt = TextToken
			return z.tt
		}
		// If necessary, implicitly close an unclosed tag to bail out before
		// an infinite loop occurs. Helpful for IDEs which compile as user types.
		if x := z.readUnclosedTag(); x {
			z.tt = TextToken
			return z.tt
		}
		switch tokenType {
		case StartTagToken:
			// If we see an element before "---", ignore any future "---"
			if z.fm == FrontmatterInitial {
				z.fm = FrontmatterClosed
			}
			z.tt = z.readStartTag()
			return z.tt
		case EndTagToken:
			// If we see an element before "---", ignore any future "---"
			if z.fm == FrontmatterInitial {
				z.fm = FrontmatterClosed
			}
			c = z.readByte()
			if z.err != nil {
				break loop
			}
			if c == '>' {
				// "</>" closes a Fragment.
				z.tt = EndTagToken
				return z.tt
			}
			if 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' {
				z.readTag(false)
				tagName := string(z.buf[z.data.Start:z.data.End])
				if tagName == z.noExpressionTag {
					// out of the tag block
					z.noExpressionTag = ""
				}
				if z.err != nil {
					z.tt = ErrorToken
				} else {
					z.tt = EndTagToken
				}
				return z.tt
			}
			z.raw.End--
			z.tt = CommentToken
			return z.tt
		case CommentToken:
			if c == '!' {
				z.tt = z.readMarkupDeclaration()
				return z.tt
			}
			z.raw.End--
			z.readUntilCloseAngle()
			z.tt = CommentToken
			return z.tt
		}
	}
	if z.raw.Start < z.raw.End {
		// We're scanning Text, so open braces should be ignored
		z.openBraceIsExpressionStart = false
		z.data.End = z.raw.End
		z.tt = TextToken
		return z.tt
	}
	z.tt = ErrorToken
	return z.tt

frontmatter_loop:
	for {
		if z.fm == FrontmatterClosed {
			goto loop
		}
		c := z.readByte()
		if z.err != nil {
			break frontmatter_loop
		}
		// handle frontmatter fence
		if c == '-' {
			z.dashCount++ // increase dashCount with each consecutive "-"
		}
		if z.dashCount == 3 {
			switch z.fm {
			case FrontmatterInitial:
				// Opening "---" fence.
				z.fm = FrontmatterOpen
				z.dashCount = 0
				z.data.Start = z.raw.End - len("---")
				z.data.End = z.raw.End
				z.tt = FrontmatterFenceToken
				z.openBraceIsExpressionStart = false
				return z.tt
			case FrontmatterOpen:
				// Closing "---" fence; flush any accumulated text first.
				if z.raw.Start < z.raw.End-len("---") {
					z.data.End = z.raw.End - len("---")
					z.openBraceIsExpressionStart = false
					z.tt = TextToken
					return z.tt
				}
				z.fm = FrontmatterClosed
				z.dashCount = 0
				z.data.End = z.raw.End
				z.tt = FrontmatterFenceToken
				z.openBraceIsExpressionStart = z.noExpressionTag == ""
				return z.tt
			}
		}
		if c == '-' {
			continue frontmatter_loop
		}
		// JS Comment or RegExp
		if c == '/' {
			z.readCommentOrRegExp([]byte{})
			z.tt = TextToken
			z.data.End = z.raw.End
			return z.tt
		}
		s := z.buf[z.raw.Start : z.raw.Start+1][0]
		if s == '<' || s == '{' || s == '}' || c == '<' || c == '{' || c == '}' {
			z.dashCount = 0
			if z.fm == FrontmatterOpen && (s == '<' || c == '<') {
				// Do not support elements inside of frontmatter!
				continue frontmatter_loop
			} else {
				z.raw.End--
				goto loop
			}
		}
		// handle string
		if c == '\'' || c == '"' || c == '`' {
			z.readString(c)
			z.tt = TextToken
			z.data.End = z.raw.End
			return z.tt
		}
		z.dashCount = 0
		continue frontmatter_loop
	}
	z.data.End = z.raw.End

raw_with_expression_loop:
	for {
		c := z.readByte()
		if z.err != nil {
			break raw_with_expression_loop
		}
		// handle string
		if c == '`' {
			z.readString(c)
			z.tt = TextToken
			z.data.End = z.raw.End
			return z.tt
		}
		if c == '{' || c == '}' {
			if x := z.raw.End - len("{"); z.raw.Start < x {
				z.raw.End = x
				z.data.End = x
				z.tt = TextToken
				return z.tt
			}
			z.raw.End--
			goto expression_loop
		}
		if c != '<' {
			continue raw_with_expression_loop
		}
		c = z.readByte()
		if z.err != nil {
			break raw_with_expression_loop
		}
		if c != '/' {
			z.raw.End--
			continue raw_with_expression_loop
		}
		if z.readRawEndTag() || z.err != nil {
			break raw_with_expression_loop
		}
	}
	z.data.End = z.raw.End
	// A textarea's or title's RCDATA can contain escaped entities.
	z.textIsRaw = z.rawTag != "textarea" && z.rawTag != "title"
	z.rawTag = ""

expression_loop:
	for {
		c := z.readByte()
		if z.err != nil {
			break expression_loop
		}
		// JS Comment or RegExp
		if c == '/' {
			boundaryChars := []byte{'{', '}', '\'', '"', '`'}
			z.readCommentOrRegExp(boundaryChars)
			// If we exit on a '}', ignore the final character here
			// NOTE(review): z.buf[z.data.End-1:...] assumes z.data.End > 0 —
			// confirm the scanner guarantees at least one consumed byte here.
			lastChar := z.buf[z.data.End-1 : z.data.End][0]
			for _, c := range boundaryChars {
				if lastChar == c {
					z.raw.End--
				}
			}
			z.data.End = z.raw.End
			z.tt = TextToken
			return z.tt
		}
		// handle string
		if c == '\'' || c == '"' || c == '`' {
			z.readString(c)
			z.tt = TextToken
			z.data.End = z.raw.End
			return z.tt
		}
		if c == '<' {
			// Check next byte to see if this is an element or a JS expression.
			// Note: this is not a perfect check, just good enough for most cases!
			c1 := z.readByte()
			if z.err != nil {
				break expression_loop
			}
			if unicode.IsSpace(rune(c1)) || unicode.IsNumber(rune(c1)) {
				continue
			}
			// Otherwise, we have an element. Reset pointer and try again.
			z.raw.End -= 2
			z.data.End = z.raw.End
			if z.rawTag != "" {
				goto raw_with_expression_loop
			} else {
				goto loop
			}
		}
		if c != '{' && c != '}' {
			continue expression_loop
		}
		// Flush accumulated text before emitting a brace token.
		if x := z.raw.End - len("{"); z.raw.Start < x {
			z.raw.End = x
			z.data.End = x
			z.tt = TextToken
			return z.tt
		}
		switch c {
		case '{':
			if z.openBraceIsExpressionStart {
				// This '{' begins a new expression: push fresh counters.
				z.openBraceIsExpressionStart = false
				z.expressionStack = append(z.expressionStack, 0)
				z.expressionElementStack = append(z.expressionElementStack, make([]string, 0))
				z.data.End = z.raw.End - 1
				z.tt = StartExpressionToken
				return z.tt
			} else {
				// Nested brace inside the current expression: just count it.
				if len(z.expressionStack) > 0 {
					z.expressionStack[len(z.expressionStack)-1]++
				}
				z.data.End = z.raw.End
				z.tt = TextToken
				return z.tt
			}
		case '}':
			if len(z.expressionStack) == 0 {
				z.data.End = z.raw.End
				z.tt = TextToken
				return z.tt
			}
			z.expressionStack[len(z.expressionStack)-1]--
			// -1 means this '}' closes the expression itself (not a nested pair).
			if z.expressionStack[len(z.expressionStack)-1] == -1 {
				z.openBraceIsExpressionStart = z.noExpressionTag == ""
				z.expressionStack = z.expressionStack[0 : len(z.expressionStack)-1]
				z.expressionElementStack = z.expressionElementStack[0 : len(z.expressionElementStack)-1]
				z.data.End = z.raw.End
				z.tt = EndExpressionToken
				return z.tt
			}
		}
	}
	if z.raw.Start < z.raw.End {
		z.data.End = z.raw.End
		z.tt = TextToken
		return z.tt
	}
	z.tt = ErrorToken
	return z.tt
}

// Raw returns the unmodified text of the current token. Calling Next, Token,
// Text, TagName or TagAttr may change the contents of the returned slice.
//
// The token stream's raw bytes partition the byte stream (up until an
// ErrorToken). There are no overlaps or gaps between two consecutive token's
// raw bytes. One implication is that the byte offset of the current token is
// the sum of the lengths of all previous tokens' raw bytes.
func (z *Tokenizer) Raw() []byte {
	return z.buf[z.raw.Start:z.raw.End]
}

var (
	nul         = []byte("\x00")
	replacement = []byte("\ufffd")
)

// Text returns the unescaped text of a text, comment or doctype token. The
// contents of the returned slice may change on the next call to Next.
func (z *Tokenizer) Text() []byte {
	switch z.tt {
	case TextToken, CommentToken, DoctypeToken:
		s := z.buf[z.data.Start:z.data.End]
		// Consume the data span so a second call returns nil-ish content.
		z.data.Start = z.raw.End
		z.data.End = z.raw.End
		if (z.convertNUL || z.tt == CommentToken) && bytes.Contains(s, nul) {
			s = bytes.Replace(s, nul, replacement, -1)
		}
		// Do not unescape text, leave it raw for the browser
		// if !z.textIsRaw {
		// 	s = unescape(s, false)
		// }
		return s
	}
	return nil
}

// TagName returns the lower-cased name of a tag token (the `img` out of
// `<IMG SRC="foo">`) and whether the tag has attributes.
// The contents of the returned slice may change on the next call to Next.
func (z *Tokenizer) TagName() (name []byte, hasAttr bool) { if z.data.Start < z.data.End { switch z.tt { case StartTagToken, EndTagToken, SelfClosingTagToken: s := z.buf[z.data.Start:z.data.End] z.data.Start = z.raw.End z.data.End = z.raw.End return s, z.nAttrReturned < len(z.attr) } } return nil, false } // TagAttr returns the lower-cased key and unescaped value of the next unparsed // attribute for the current tag token and whether there are more attributes. // The contents of the returned slices may change on the next call to Next. func (z *Tokenizer) TagAttr() (key []byte, keyLoc loc.Loc, val []byte, valLoc loc.Loc, attrType AttributeType, moreAttr bool) { if z.nAttrReturned < len(z.attr) { switch z.tt { case StartTagToken, SelfClosingTagToken: x := z.attr[z.nAttrReturned] attrType := z.attrTypes[z.nAttrReturned] z.nAttrReturned++ key = z.buf[x[0].Start:x[0].End] val = z.buf[x[1].Start:x[1].End] keyLoc := loc.Loc{Start: x[0].Start} valLoc := loc.Loc{Start: x[1].Start} var attrVal []byte if attrType == ExpressionAttribute { attrVal = val } else { attrVal = unescape(val, true) } return key, keyLoc, attrVal, valLoc, attrType, z.nAttrReturned < len(z.attr) } } return nil, loc.Loc{Start: 0}, nil, loc.Loc{Start: 0}, QuotedAttribute, false } // Token returns the current Token. The result's Data and Attr values remain // valid after subsequent Next calls. 
func (z *Tokenizer) Token() Token { t := Token{Type: z.tt, Loc: z.Loc()} switch z.tt { case StartExpressionToken: t.Data = "{" case EndExpressionToken: t.Data = "}" case TextToken, CommentToken, DoctypeToken: t.Data = string(z.Text()) case StartTagToken, SelfClosingTagToken, EndTagToken: name, moreAttr := z.TagName() for moreAttr { var key, val []byte var keyLoc, valLoc loc.Loc var attrType AttributeType var attrTokenizer *Tokenizer = nil key, keyLoc, val, valLoc, attrType, moreAttr = z.TagAttr() t.Attr = append(t.Attr, Attribute{"", atom.String(key), keyLoc, string(val), valLoc, attrTokenizer, attrType}) } if isFragment(string(name)) || isComponent(string(name)) { t.DataAtom, t.Data = 0, string(name) } else if a := atom.Lookup(name); a != 0 { t.DataAtom, t.Data = a, a.String() } else { t.DataAtom, t.Data = 0, string(name) } } return t } // NewTokenizer returns a new HTML Tokenizer for the given Reader. // The input is assumed to be UTF-8 encoded. func NewTokenizer(r io.Reader) *Tokenizer { return NewTokenizerFragment(r, "") } // NewTokenizerFragment returns a new HTML Tokenizer for the given Reader, for // tokenizing an existing element's InnerHTML fragment. contextTag is that // element's tag, such as "div" or "iframe". // // For example, how the InnerHTML "a<b" is tokenized depends on whether it is // for a <p> tag or a <script> tag. // // The input is assumed to be UTF-8 encoded. 
func NewTokenizerFragment(r io.Reader, contextTag string) *Tokenizer { buf := new(bytes.Buffer) buf.ReadFrom(r) z := &Tokenizer{ r: r, buf: buf.Bytes(), fm: FrontmatterInitial, openBraceIsExpressionStart: true, } if contextTag != "" { switch s := strings.ToLower(contextTag); s { case "iframe", "noembed", "noframes", "plaintext", "script", "style", "title", "textarea", "xmp": z.rawTag = s } } return z } ================================================ FILE: internal/token_test.go ================================================ package astro import ( "reflect" "strings" "testing" "github.com/withastro/compiler/internal/test_utils" ) type TokenTypeTest struct { name string input string expected []TokenType } type AttributeTest struct { name string input string expected []AttributeType } type LocTest struct { name string input string expected []int } func TestBasic(t *testing.T) { Basic := []TokenTypeTest{ { "doctype", `<!DOCTYPE html>`, []TokenType{DoctypeToken}, }, { "start tag", `<html>`, []TokenType{StartTagToken}, }, { "dot component", `<pkg.Item>`, []TokenType{StartTagToken}, }, { "noscript component", `<noscript><Component /></noscript>`, []TokenType{StartTagToken, SelfClosingTagToken, EndTagToken}, }, { "end tag", `</html>`, []TokenType{EndTagToken}, }, { "unclosed tag", `<components.`, []TokenType{TextToken}, }, { "self-closing tag (slash)", `<meta charset="utf-8" />`, []TokenType{SelfClosingTagToken}, }, { "self-closing title", `<title set:html={} /><div></div>`, []TokenType{SelfClosingTagToken, StartTagToken, EndTagToken}, }, { "self-closing tag (no slash)", `<img width="480" height="320">`, []TokenType{SelfClosingTagToken}, }, { "text", `Hello@`, []TokenType{TextToken}, }, { "self-closing script", `<script />`, []TokenType{SelfClosingTagToken}, }, { "self-closing script with sibling", `<script /><div></div><div />`, []TokenType{SelfClosingTagToken, StartTagToken, EndTagToken, SelfClosingTagToken}, }, { "self-closing style", `<style />`, 
[]TokenType{SelfClosingTagToken}, }, { "self-closing style with sibling", `<style /><div></div><div />`, []TokenType{SelfClosingTagToken, StartTagToken, EndTagToken, SelfClosingTagToken}, }, { "attribute with quoted template literal", "<a :href=\"`/home`\">Home</a>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "No expressions inside math", `<math>{test}</math>`, []TokenType{StartTagToken, TextToken, TextToken, TextToken, EndTagToken}, }, { "No expressions inside math (complex)", `<span><math xmlns="http://www.w3.org/1998/Math/MathML"><mo>4</mo><mi /><semantics><annotation encoding="application/x-tex">\sqrt {x}</annotation></semantics></math></span>`, []TokenType{StartTagToken, StartTagToken, StartTagToken, TextToken, EndTagToken, SelfClosingTagToken, StartTagToken, StartTagToken, TextToken, TextToken, TextToken, TextToken, EndTagToken, EndTagToken, EndTagToken, EndTagToken}, }, { "Expression attributes allowed inside math", `<math set:html={test} />`, []TokenType{SelfClosingTagToken}, }, { "SVG (self-closing)", `<svg><path/></svg>`, []TokenType{StartTagToken, SelfClosingTagToken, EndTagToken}, }, { "SVG (left open)", `<svg><path></svg>`, // note: this test isn’t “ideal” it’s just testing current behavior []TokenType{StartTagToken, StartTagToken, EndTagToken}, }, { "SVG with style", `<svg><style> #fire { fill: orange; stroke: purple; } .wordmark { fill: black; } </style><path id="#fire" d="M0,0 M340,29"></path><path class="wordmark" d="M0,0 M340,29"></path></svg>`, []TokenType{StartTagToken, StartTagToken, TextToken, EndTagToken, StartTagToken, EndTagToken, StartTagToken, EndTagToken, EndTagToken}, }, { "form element with expression follwed by another form", `<form>{data.formLabelA}</form><form><button></button></form>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, StartTagToken, StartTagToken, EndTagToken, EndTagToken}, }, { "text", "test", []TokenType{TextToken}, }, { "comment", `<!-- comment -->`, 
[]TokenType{CommentToken}, }, { "top-level expression", `{ value }`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "expression inside element", `<div>{ value }</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with solidus inside element", `<div>{ 16 / 4 }</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with strings inside element", `<div>{ "string" + 16 / 4 + "}" }</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression inside component", `<Component>{items.map(item => <div>{item}</div>)}</Component>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression inside component with quoted attr", `<Component a="b">{items.map(item => <div>{item}</div>)}</Component>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression inside component with expression attr", `<Component data={data}>{items.map(item => <div>{item}</div>)}</Component>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression inside component with named expression attr", `<Component named={data}>{items.map(item => <div>{item}</div>)}</Component>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with multiple returns", `<div>{() => { let generate = (input) => { 
let a = () => { return; }; let b = () => { return; }; let c = () => { return; }; }; }}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with multiple elements", `<div>{() => { if (value > 0.25) { return <span>Default</span> } else if (value > 0.5) { return <span>Another</span> } else if (value > 0.75) { return <span>Other</span> } return <span>Yet Other</span> }}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with multiple elements returning self closing tags", `<div>{()=>{ if (true) { return <hr />; }; if (true) { return <img />; } }}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression returning a mix of self-closing tags and elements", `<div>{() => { if (value > 0.25) { return <br /> } else if (value > 0.5) { return <hr /> } else if (value > 0.75) { return <div /> } return <div>Yaaay</div> }}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, 
TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with switch returning a mix of self-closing tags and elements", `<div>{items.map(({ type, ...data }) => { switch (type) { case 'card': { return (<Card {...data} />);}case 'paragraph': { return (<p>{data.body}</p>);}}})}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with < operators", `<div>{() => { if (value < 0.25) { return <span>Default</span> } else if (value <0.5) { return <span>Another</span> } else if (value < 0.75) { return <span>Other</span> } return <span>Yet Other</span> }}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "attribute expression with quoted braces", `<div value={"{"} />`, []TokenType{SelfClosingTagToken}, }, { "attribute expression with solidus", `<div value={100 / 2} />`, []TokenType{SelfClosingTagToken}, }, { "attribute expression with solidus inside template literal", "<div value={attr ? `a/b` : \"c\"} />", []TokenType{SelfClosingTagToken}, }, { "complex attribute expression", "<div value={`${attr ? 
`a/b ${`c ${`d ${cool}`}`}` : \"d\"} awesome`} />", []TokenType{SelfClosingTagToken}, }, { "attribute expression with solidus no spaces", `<div value={(100/2)} />`, []TokenType{SelfClosingTagToken}, }, { "attribute expression with quote", `<div value={/* hello */} />`, []TokenType{SelfClosingTagToken}, }, { "JSX-style comment inside element", `<div {/* hello */} a=b />`, []TokenType{SelfClosingTagToken}, }, { "quotes within textContent", `<p>can't</p>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "apostrophe within title", `<title>Astro's</title>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "quotes within title", `<title>My Astro "Website"</title>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "textarea inside expression", ` {bool && <textarea>It was a dark and stormy night...</textarea>} {bool && <input>} `, []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken, TextToken, StartExpressionToken, TextToken, SelfClosingTagToken, EndExpressionToken, TextToken}, }, { "text containing a /", "<span>next/router</span>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "iframe allows attributes", "<iframe src=\"https://google.com\"></iframe>", []TokenType{StartTagToken, EndTagToken}, }, { "is:raw allows children to be parsed as Text", "<span is:raw>function foo() { }</span>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "is:raw treats all children as raw text", "<Fragment is:raw><ul></ue></Fragment>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "is:raw treats all children as raw text", "<Fragment is:raw><ul></ue></Fragment>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "is:raw allows other attributes", "<span data-raw={true} is:raw><%= Hi =%></span>", []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "Doesn't throw on other data attributes", "<span data-foo></span>", []TokenType{StartTagToken, EndTagToken}, }, { "Doesn't work if attr 
is named data", "<span data>{Hello}</span>", []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "Supports <style> inside of <svg>", `<svg><style><div>:root { color: red; }</style></svg>`, []TokenType{StartTagToken, StartTagToken, TextToken, EndTagToken, EndTagToken}, }, { "multiple scoped :global", `<style>:global(test-2) {}</style><style>test-1{}</style>`, []TokenType{StartTagToken, TextToken, EndTagToken, StartTagToken, TextToken, EndTagToken}, }, { "multiple styles", `<style global>a {}</style><style>b {}</style><style>c {}</style>`, []TokenType{StartTagToken, TextToken, EndTagToken, StartTagToken, TextToken, EndTagToken, StartTagToken, TextToken, EndTagToken}, }, { "element with single quote", `<div>Don't panic</div>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "fragment", `<>foo</>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "fragment shorthand", `<h1>A{cond && <>item <span>{text}</span></>}</h1>`, []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, StartTagToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, EndTagToken, EndExpressionToken, EndTagToken}, }, { "fragment", `<Fragment>foo</Fragment>`, []TokenType{StartTagToken, TextToken, EndTagToken}, }, { "fragment shorthand in nested expression", `<div>{x.map((x) => (<>{x ? 
"truthy" : "falsy"}</>))}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "select with expression", `<select>{[1, 2, 3].map(num => <option>{num}</option>)}</select>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "select with expression", `<select>{[1, 2, 3].map(num => <option>{num}</option>)}</select><div>Hello</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken, StartTagToken, TextToken, EndTagToken}, }, { "selectedcontent element", `<select><button><selectedcontent></selectedcontent></button><option>A</option></select>`, []TokenType{StartTagToken, StartTagToken, StartTagToken, EndTagToken, EndTagToken, StartTagToken, TextToken, EndTagToken, EndTagToken}, }, { "selectedcontent self-closing", `<select><button><selectedcontent /></button><option>A</option></select>`, []TokenType{StartTagToken, StartTagToken, SelfClosingTagToken, EndTagToken, StartTagToken, TextToken, EndTagToken, EndTagToken}, }, { "single open brace", "<main id={`{`}></main>", []TokenType{StartTagToken, EndTagToken}, }, { "single close brace", "<main id={`}`}></main>", []TokenType{StartTagToken, EndTagToken}, }, { "extra close brace", "<main id={`${}}`}></main>", []TokenType{StartTagToken, EndTagToken}, }, { "Empty expression", "({})", []TokenType{TextToken, StartExpressionToken, EndExpressionToken, TextToken}, }, { "expression after text", `<h1>A{cond && <span>Test {text}</span>}</h1>`, []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, StartTagToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, 
EndExpressionToken, EndTagToken}, }, { "expression surrounded by text", `<h1>A{cond && <span>Test {text} Cool</span>}</h1>`, []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, StartTagToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken, TextToken, EndTagToken, EndExpressionToken, EndTagToken}, }, { "switch statement", `<div>{() => { switch(value) { case 'a': return <A></A>; case 'b': return <B />; case 'c': return <C></C> }}}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, EndTagToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, StartTagToken, EndTagToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "switch statement with expression", `<div>{() => { switch(value) { case 'a': return <A>{value}</A>; case 'b': return <B />; case 'c': return <C>{value.map(i => <span>{i}</span>)}</C> }}}</div>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, SelfClosingTagToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken}, }, { "attribute expression with unmatched quotes", "<h1 set:text={`Houston we've got a problem`}></h1>", []TokenType{StartTagToken, EndTagToken}, }, { "attribute expression with unmatched quotes", "<h1 set:html={`Oh \"no...`}></h1>", []TokenType{StartTagToken, EndTagToken}, }, { "attribute expression with unmatched quotes inside matched quotes", "<h1 set:html={\"hello y'all\"}></h1>", []TokenType{StartTagToken, EndTagToken}, }, { "attribute expression with unmatched quotes inside matched quotes II", "<h1 
set:html={'\"Did Nate handle this case, too?\", Fred pondered...'}></h1>", []TokenType{StartTagToken, EndTagToken}, }, { "typescript generic", `<ul>{items.map((item: Item<Checkbox>)) => <li>{item.checked}</li>)}</ul>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "typescript generic II", `<ul>{items.map((item: Item<Checkbox>)) => <Checkbox>{item.checked}</Checkbox>)}</ul>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, EndTagToken}, }, { "incomplete tag", `<MyAstroComponent`, []TokenType{TextToken}, }, { "incomplete tag II", `<MyAstroComponent` + "\n", []TokenType{TextToken}, }, { "incomplete tag III", `<div></div><MyAstroComponent` + "\n", []TokenType{StartTagToken, EndTagToken, TextToken}, }, } runTokenTypeTest(t, Basic) } func TestFrontmatter(t *testing.T) { Frontmatter := []TokenTypeTest{ { "simple token", `---`, []TokenType{FrontmatterFenceToken}, }, { "basic case", ` --- const a = 0; --- `, []TokenType{FrontmatterFenceToken, TextToken, FrontmatterFenceToken}, }, { "ignores leading whitespace", ` --- const a = 0; --- `, []TokenType{FrontmatterFenceToken, TextToken, FrontmatterFenceToken}, }, { "allows leading comments", ` <!-- Why? Who knows! 
--> --- const a = 0; --- `, []TokenType{CommentToken, FrontmatterFenceToken, TextToken, FrontmatterFenceToken}, }, { "treated as text after element", ` <div /> --- const a = 0; --- `, []TokenType{SelfClosingTagToken, TextToken}, }, { "treated as text after closed", ` --- const a = 0; --- <div> --- </div> `, []TokenType{FrontmatterFenceToken, TextToken, FrontmatterFenceToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken}, }, { "does not tokenize elements inside", ` --- const a = <div />; --- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "no elements or expressions in frontmatter", ` --- const contents = "foo"; const a = <div>{contents}</div>; --- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "brackets within frontmatter treated as text", ` --- const someProps = { count: 0, } --- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, TextToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "frontmatter tags and brackets all treated as text", ` --- const contents = "foo"; const a = <ul>{contents}</ul> const someProps = { count: 0, } --- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "less than isn’t a tag", ` --- const a = 2; const div = 4 const isBigger = a < div; --- `, []TokenType{FrontmatterFenceToken, TextToken, FrontmatterFenceToken}, }, { "less than attr", `<div aria-hidden={count < 1} />`, []TokenType{SelfClosingTagToken}, }, { "greater than attr", `<div aria-hidden={count > 1} />`, []TokenType{SelfClosingTagToken}, }, { "greater than attr inside expression", `{values.map(value => <div aria-hidden={count > 1} />)}`, []TokenType{StartExpressionToken, TextToken, SelfClosingTagToken, TextToken, EndExpressionToken}, }, { "single-line comments", ` --- // --- <div> 
--- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "multi-line comments", ` --- /* --- <div> */ --- `, []TokenType{FrontmatterFenceToken, TextToken, TextToken, FrontmatterFenceToken}, }, { "RegExp", `--- const RegExp = /---< > > { }; import thing from "thing"; / --- {html}`, []TokenType{FrontmatterFenceToken, TextToken, TextToken, FrontmatterFenceToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken}, }, { "RegExp with Escape", `--- export async function getStaticPaths() { const pattern = /\.md$/g; } --- <div />`, []TokenType{FrontmatterFenceToken, TextToken, TextToken, TextToken, TextToken, TextToken, TextToken, FrontmatterFenceToken, SelfClosingTagToken}, }, { "textarea", `<textarea>{html}</textarea>`, []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, // { // "less than with no space isn’t a tag", // ` // --- // const a = 2; // const div = 4 // const isBigger = a <div // --- // `, // []TokenType{FrontmatterFenceToken, TextToken, FrontmatterFenceToken}, // }, } runTokenTypeTest(t, Frontmatter) } func TestExpressions(t *testing.T) { Expressions := []TokenTypeTest{ { "simple expression", `{value}`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "object expression", `{{ value }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, EndExpressionToken}, }, { "tag expression", `{<div />}`, []TokenType{StartExpressionToken, SelfClosingTagToken, EndExpressionToken}, }, { "string expression", `{"<div {attr} />"}`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "function expression", `{() => { return value }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken}, }, { "nested one level", `{() => { return <div>{value}</div> }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, 
EndTagToken, TextToken, TextToken, EndExpressionToken}, }, { "nested one level with self-closing tag before expression", `{() => { return <div><div />{value}</div> }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, SelfClosingTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, EndExpressionToken}, }, { "nested two levels", `{() => { return <div>{() => { return value }}</div> }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, EndExpressionToken}, }, { "nested two levels with tag", `{() => { return <div>{() => { return <div>{value}</div> }}</div> }}`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, EndExpressionToken, EndTagToken, TextToken, TextToken, EndExpressionToken}, }, { "expression map", `<div> {items.map((item) => ( // < > < } <div>{item}</div> ))} </div>`, []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken, TextToken, EndExpressionToken, TextToken, EndTagToken}, }, { "left bracket within string", `{"{"}`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "right bracket within string", `{'}'}`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "expression within string", `{'{() => <Component />}'}`, []TokenType{StartExpressionToken, TextToken, EndExpressionToken}, }, { "expression within single-line comment", `{ // < > < } 'text' }`, []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, EndExpressionToken}, }, { "expression within multi-line comment", `{/* < > < } */ 
'text'}`, []TokenType{StartExpressionToken, TextToken, TextToken, EndExpressionToken}, }, { "expression with nested strings", "{`${`${`${foo}`}`}`}", []TokenType{StartExpressionToken, TextToken, TextToken, TextToken, TextToken, TextToken, EndExpressionToken}, }, { "element with multiple expressions", "<div>Hello {first} {last}</div>", []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "ternary render", "{false ? <div>#f</div> : <div>#t</div>}", []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "title", "<title>test {expr} test</title>", []TokenType{StartTagToken, TextToken, StartExpressionToken, TextToken, EndExpressionToken, TextToken, EndTagToken}, }, { "String interpolation inside an expression within a title", "<title>{content.title && `${title} 🚀 ${title}`}</title>", []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "Nested use of string templates inside expressions", "<div>{`${a} inner${a > 1 ? 's' : ''}.`}</div>", []TokenType{StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, EndTagToken}, }, { "expression with single quote", `{true && <div>Don't panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "expression with double quote", `{true && <div>Don't panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "expression with literal quote", `{true && <div>Don` + "`" + `t panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "ternary expression with single quote", `{true ? 
<div>Don't panic</div> : <div>Do' panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, TextToken, EndTagToken, TextToken, StartTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "single quote after expression", `{true && <div>{value} Don't panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, StartExpressionToken, TextToken, EndExpressionToken, TextToken, EndTagToken, EndExpressionToken}, }, { "single quote after self-closing", `{true && <div><span /> Don't panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, SelfClosingTagToken, TextToken, EndTagToken, EndExpressionToken}, }, { "single quote after end tag", `{true && <div><span></span> Don't panic</div>}`, []TokenType{StartExpressionToken, TextToken, StartTagToken, StartTagToken, EndTagToken, TextToken, EndTagToken, EndExpressionToken}, }, } runTokenTypeTest(t, Expressions) } func TestAttributes(t *testing.T) { Attributes := []AttributeTest{ { "double quoted", `<div a="value" />`, []AttributeType{QuotedAttribute}, }, { "single quoted", `<div a='value' />`, []AttributeType{QuotedAttribute}, }, { "not quoted", `<div a=value />`, []AttributeType{QuotedAttribute}, }, { "expression", `<div a={value} />`, []AttributeType{ExpressionAttribute}, }, { "expression with apostrophe", `<div a="fred's" />`, []AttributeType{QuotedAttribute}, }, { "expression with template literal", "<div a=\"`value`\" />", []AttributeType{QuotedAttribute}, }, { "expression with template literal interpolation", "<div a=\"`${value}`\" />", []AttributeType{QuotedAttribute}, }, { "shorthand", `<div {value} />`, []AttributeType{ShorthandAttribute}, }, { "less than expression", `<div a={a < b} />`, []AttributeType{ExpressionAttribute}, }, { "greater than expression", `<div a={a > b} />`, []AttributeType{ExpressionAttribute}, }, { "spread", `<div {...value} />`, []AttributeType{SpreadAttribute}, }, { "template literal", "<div a=`value` />", 
[]AttributeType{TemplateLiteralAttribute}, }, { "all", "<div a='value' a={value} {value} {...value} a=`value` />", []AttributeType{QuotedAttribute, ExpressionAttribute, ShorthandAttribute, SpreadAttribute, TemplateLiteralAttribute}, }, { "multiple quoted", `<div a="value" b='value' c=value/>`, []AttributeType{QuotedAttribute, QuotedAttribute, QuotedAttribute}, }, { "expression with quoted braces", `<div value={ "{" } />`, []AttributeType{ExpressionAttribute}, }, { "attribute expression with solidus inside template literal", "<div value={attr ? `a/b` : \"c\"} />", []AttributeType{ExpressionAttribute}, }, { "attribute expression with solidus inside template literal with trailing text", "<div value={`${attr ? `a/b` : \"c\"} awesome`} />", []AttributeType{ExpressionAttribute}, }, { "iframe allows attributes", "<iframe src=\"https://google.com\"></iframe>", []AttributeType{QuotedAttribute}, }, { "shorthand attribute with comment", "<div {/* a comment */ value} />", []AttributeType{ShorthandAttribute}, }, { "expression with comment", "<div a={/* a comment */ value} />", []AttributeType{ExpressionAttribute}, }, } runAttributeTypeTest(t, Attributes) } func TestLoc(t *testing.T) { Locs := []LocTest{ { "doctype", `<!DOCTYPE html>`, []int{0, 11}, }, { "frontmatter", `--- doesNotExist --- `, []int{0, 1, 4}, }, { "expression", `<div>{console.log(hey)}</div>`, []int{0, 2, 6, 7, 23, 26}, }, { "expression II", `{"hello" + hey}`, []int{0, 1, 2, 9, 15}, }, { "element I", `<div></div>`, []int{0, 2, 8}, }, } runTokenLocTest(t, Locs) } func runTokenTypeTest(t *testing.T, suite []TokenTypeTest) { for _, tt := range suite { value := test_utils.Dedent(tt.input) t.Run(tt.name, func(t *testing.T) { tokens := make([]TokenType, 0) tokenizer := NewTokenizer(strings.NewReader(value)) var next TokenType for { next = tokenizer.Next() if next == ErrorToken { break } tokens = append(tokens, next) } if !reflect.DeepEqual(tokens, tt.expected) { t.Errorf("Tokens = %v\nExpected = %v", tokens, 
tt.expected) } }) } } func runAttributeTypeTest(t *testing.T, suite []AttributeTest) { for _, tt := range suite { value := test_utils.Dedent(tt.input) t.Run(tt.name, func(t *testing.T) { attributeTypes := make([]AttributeType, 0) tokenizer := NewTokenizer(strings.NewReader(value)) var next TokenType for { next = tokenizer.Next() if next == ErrorToken { break } for _, attr := range tokenizer.Token().Attr { attributeTypes = append(attributeTypes, attr.Type) } } if !reflect.DeepEqual(attributeTypes, tt.expected) { t.Errorf("Attributes = %v\nExpected = %v", attributeTypes, tt.expected) } }) } } func runTokenLocTest(t *testing.T, suite []LocTest) { for _, tt := range suite { value := test_utils.Dedent(tt.input) t.Run(tt.name, func(t *testing.T) { locs := make([]int, 0) tokenizer := NewTokenizer(strings.NewReader(value)) var next TokenType locs = append(locs, tokenizer.Token().Loc.Start) for { next = tokenizer.Next() if next == ErrorToken { break } tok := tokenizer.Token() locs = append(locs, tok.Loc.Start+1) } if !reflect.DeepEqual(locs, tt.expected) { t.Errorf("Tokens = %v\nExpected = %v", locs, tt.expected) } }) } } ================================================ FILE: internal/transform/scope-css.go ================================================ package transform import ( // "strings" "fmt" "strings" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/lib/esbuild/css_parser" "github.com/withastro/compiler/lib/esbuild/css_printer" "github.com/withastro/compiler/lib/esbuild/logger" a "golang.org/x/net/html/atom" ) // Take a slice of DOM nodes, and scope CSS within every <style> tag func ScopeStyle(styles []*astro.Node, opts TransformOptions) bool { didScope := false for _, n := range styles { if n.DataAtom != a.Style { continue } if hasTruthyAttr(n, "global") { fmt.Printf("Found `<style global>` in %s! 
Please migrate to the `is:global` directive.\n", opts.Filename) continue } if hasTruthyAttr(n, "is:global") { continue } if n.FirstChild == nil || strings.TrimSpace(n.FirstChild.Data) == "" { if !HasAttr(n, "define:vars") { continue } } didScope = true n.Attr = append(n.Attr, astro.Attribute{ Key: "data-astro-id", Val: opts.Scope, }) if n.FirstChild == nil || strings.TrimSpace(n.FirstChild.Data) == "" { continue } scopeStrategy := css_printer.ScopeStrategyWhere if opts.ScopedStyleStrategy == "class" { scopeStrategy = css_printer.ScopeStrategyClass } else if opts.ScopedStyleStrategy == "attribute" { scopeStrategy = css_printer.ScopeStrategyAttribute } // Use vendored version of esbuild internals to parse AST tree := css_parser.Parse(logger.Log{AddMsg: func(msg logger.Msg) {}}, logger.Source{Contents: n.FirstChild.Data}, css_parser.Options{MinifySyntax: false, MinifyWhitespace: true}) // esbuild's internal `css_printer` has been modified to emit Astro scoped styles result := css_printer.Print(tree, css_printer.Options{MinifyWhitespace: true, Scope: opts.Scope, ScopeStrategy: scopeStrategy}) n.FirstChild.Data = string(result.CSS) } return didScope } func GetDefineVars(styles []*astro.Node) []string { values := make([]string, 0) for _, n := range styles { if n.DataAtom != a.Style { continue } if !HasAttr(n, "define:vars") { continue } attr := GetAttr(n, "define:vars") if attr != nil { switch attr.Type { case astro.QuotedAttribute: values = append(values, fmt.Sprintf("'%s'", attr.Val)) case astro.TemplateLiteralAttribute: values = append(values, fmt.Sprintf("`%s`", attr.Val)) case astro.ExpressionAttribute: values = append(values, attr.Val) } } } return values } ================================================ FILE: internal/transform/scope-css_test.go ================================================ package transform import ( "strings" "testing" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/test_utils" ) func TestScopeStyle(t 
*testing.T) { // note: the tests have hashes inlined because it’s easier to read // note: this must be valid CSS, hence the empty "{}" tests := []struct { name string source string want string }{ { name: "class", source: ".class{}", want: ".class:where(.astro-xxxxxx){}", }, { name: "id", source: "#class{}", want: "#class:where(.astro-xxxxxx){}", }, { name: "element", source: "h1{}", want: "h1:where(.astro-xxxxxx){}", }, { name: "adjacent sibling", source: ".class+.class{}", want: ".class:where(.astro-xxxxxx)+.class:where(.astro-xxxxxx){}", }, { name: "and selector", source: ".class,.class{}", want: ".class:where(.astro-xxxxxx),.class:where(.astro-xxxxxx){}", }, { name: "children universal", source: ".class *{}", want: ".class:where(.astro-xxxxxx) :where(.astro-xxxxxx){}", }, { name: "attr", source: "a[aria-current=page]{}", want: "a:where(.astro-xxxxxx)[aria-current=page]{}", }, { name: "attr universal implied", source: "[aria-visible],[aria-hidden]{}", want: ":where(.astro-xxxxxx)[aria-visible],:where(.astro-xxxxxx)[aria-hidden]{}", }, { name: "universal pseudo state", source: "*:hover{}", want: ":where(.astro-xxxxxx):hover{}", }, { name: "immediate child universal", source: ".class>*{}", want: ".class:where(.astro-xxxxxx)>:where(.astro-xxxxxx){}", }, { name: "element + pseudo state", source: ".class button:focus{}", want: ".class:where(.astro-xxxxxx) button:where(.astro-xxxxxx):focus{}", }, { name: "element + pseudo element", source: ".class h3::before{}", want: ".class:where(.astro-xxxxxx) h3:where(.astro-xxxxxx)::before{}", }, { name: "media query", source: "@media screen and (min-width:640px){.class{}}", want: "@media screen and (min-width:640px){.class:where(.astro-xxxxxx){}}", }, { name: "element + pseudo state + pseudo element", source: "button:focus::before{}", want: "button:where(.astro-xxxxxx):focus::before{}", }, { name: "global children", source: ".class :global(ul li){}", want: ".class:where(.astro-xxxxxx) ul li{}", }, { name: "global universal", 
source: ".class :global(*){}", want: ".class:where(.astro-xxxxxx) *{}", }, { name: "global with scoped children", source: ":global(section) .class{}", want: "section .class:where(.astro-xxxxxx){}", }, { name: "subsequent siblings + global", source: ".class~:global(a){}", want: ".class:where(.astro-xxxxxx)~a{}", }, { name: "global nested parens", source: ".class :global(.nav:not(.is-active)){}", want: ".class:where(.astro-xxxxxx) .nav:not(.is-active){}", }, { name: "global nested parens + chained class", source: ":global(body:not(.is-light)).is-dark,:global(body:not(.is-dark)).is-light{}", want: "body:not(.is-light).is-dark,body:not(.is-dark).is-light{}", }, { name: "global chaining global", source: ":global(.foo):global(.bar){}", want: ".foo.bar{}", }, { name: "class chained global", source: ".class:global(.bar){}", want: ".class:where(.astro-xxxxxx).bar{}", // technically this may be incorrect, but would require a lookahead to fix }, { name: "chained :not()", source: ".class:not(.is-active):not(.is-disabled){}", want: ".class:where(.astro-xxxxxx):not(.is-active):not(.is-disabled){}", }, { name: "weird chaining", source: ":hover.a:focus{}", // yes this is valid. 
yes I’m just upset as you are :( want: ":hover.a:where(.astro-xxxxxx):focus{}", }, { name: "more weird chaining", source: ":not(.is-disabled).a{}", want: ":not(.is-disabled).a:where(.astro-xxxxxx){}", }, { name: "body", source: "body h1{}", want: "body h1:where(.astro-xxxxxx){}", }, { name: "body class", source: "body.theme-dark{}", want: "body.theme-dark{}", }, { name: "html and body", source: "html,body{}", want: "html,body{}", }, { name: ":root", source: ":root{}", want: ":root{}", }, { name: "escaped characters", source: ".class\\:class:focus{}", want: ".class\\:class:where(.astro-xxxxxx):focus{}", }, { name: "only pseudo element", source: ".class>::before{}", want: ".class:where(.astro-xxxxxx)>:where(.astro-xxxxxx)::before{}", }, { name: "only pseudo class + pseudo element", source: ".class>:not(:first-child)::after{}", want: ".class:where(.astro-xxxxxx)>:where(.astro-xxxxxx):not(:first-child)::after{}", }, { name: "nested only pseudo element", source: ".class{& .other_class{&::after{}}}", want: ".class:where(.astro-xxxxxx){& .other_class:where(.astro-xxxxxx){&::after{}}}", }, { name: "global with nesting pseudo class", source: "div :global(.curve){&:last-of-type{transform:scaleY(-1)}}", want: "div:where(.astro-xxxxxx) .curve{&:last-of-type{transform:scaleY(-1)}}", }, { name: "global with nesting pseudo element", source: "div :global(.icon){&::after{content:''}}", want: "div:where(.astro-xxxxxx) .icon{&::after{content:\"\"}}", }, { name: "global with nesting multiple pseudo", source: ".wrap :global(.item){&:first-child:hover{color:red}}", want: ".wrap:where(.astro-xxxxxx) .item{&:first-child:hover{color:red}}", }, { name: "nesting & with pseudo element", source: ".class{&::before{}}", want: ".class:where(.astro-xxxxxx){&::before{}}", }, { name: "nesting & with pseudo class", source: ".class{&:hover{}}", want: ".class:where(.astro-xxxxxx){&:hover{}}", }, { name: "nesting & with multiple pseudo classes", source: ".class{&:hover:focus{}}", want: 
".class:where(.astro-xxxxxx){&:hover:focus{}}", }, // the following tests assert we leave valid CSS alone { name: "attributes", source: "body{background-image:url('/assets/bg.jpg');clip-path:polygon(0% 0%,100% 0%,100% 100%,0% 100%);}", want: "body{background-image:url(/assets/bg.jpg);clip-path:polygon(0% 0%,100% 0%,100% 100%,0% 100%)}", }, { name: "variables", source: "body{--bg:red;background:var(--bg);color:black;}", want: "body{--bg:red;background:var(--bg);color:black}", }, { name: "keyframes", source: "@keyframes shuffle{from{transform:rotate(0deg);}to{transform:rotate(360deg);}}", want: "@keyframes shuffle{from{transform:rotate(0deg)}to{transform:rotate(360deg)}}", }, { name: "keyframes 2", source: "@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}", want: "@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}", }, { name: "keyframes start", source: "@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}} h1{} h2{}", want: "@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}h1:where(.astro-xxxxxx){}h2:where(.astro-xxxxxx){}", }, { name: "keyframes middle", source: "h1{} @keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}} h2{}", want: "h1:where(.astro-xxxxxx){}@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}h2:where(.astro-xxxxxx){}", }, { name: "keyframes end", source: "h1{} h2{} @keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}", want: "h1:where(.astro-xxxxxx){}h2:where(.astro-xxxxxx){}@keyframes shuffle{0%{transform:rotate(0deg);color:blue}100%{transform:rotate(360deg)}}", }, { name: "calc", source: ":root{padding:calc(var(--space) * 2);}", want: ":root{padding:calc(var(--space) * 2)}", }, { name: "grid-template-columns", source: "div{grid-template-columns: [content-start] 1fr [content-end];}", want: 
"div:where(.astro-xxxxxx){grid-template-columns:[content-start] 1fr [content-end]}", }, { name: "charset", source: "@charset \"utf-8\";", want: "@charset \"utf-8\";", }, { name: "import (plain)", source: "@import \"./my-file.css\";", want: "@import\"./my-file.css\";", }, { name: "import (url)", source: "@import url(\"./my-file.css\");", want: "@import\"./my-file.css\";", }, { name: "valid CSS, madeup syntax", source: "@tailwind base;", want: "@tailwind base;", }, { name: "invalid CSS (`missing semi`)", source: `.foo { color: blue font-size: 18px; }`, want: `.foo:where(.astro-xxxxxx){color:blue font-size: 18px}`, }, { name: "nesting media", source: ":global(html) { @media (min-width: 640px) { color: blue } }html { background-color: lime }", want: "html{@media (min-width: 640px){color:blue}}html{background-color:lime}", }, { name: "nesting combinator", source: "div { & span { color: blue } }", want: "div:where(.astro-xxxxxx){& span:where(.astro-xxxxxx){color:blue}}", }, { name: "nesting modifier", source: ".header { background-color: white; &.dark { background-color: blue; }}", want: ".header:where(.astro-xxxxxx){background-color:white;&.dark{background-color:blue}}", }, { name: "nesting without ampersand", source: ".nesting-root{p{color:#123456}:global(h1){color:#abcdef}}", want: ".nesting-root:where(.astro-xxxxxx){p{color:#123456}h1{color:#abcdef}}", }, { name: "nested descendant selector should stay unscoped", source: "nav{a{color:deeppink}}", want: "nav:where(.astro-xxxxxx){a{color:deeppink}}", }, { name: "@container", source: `@container (min-width: 200px) and (min-height: 200px) { h1 { font-size: 30px; } }`, want: "@container (min-width: 200px) and (min-height: 200px){h1:where(.astro-xxxxxx){font-size:30px}}", }, { name: "@layer", source: "@layer theme, layout, utilities; @layer special { .item { color: rebeccapurple; }}", want: "@layer theme,layout,utilities;@layer special{.item:where(.astro-xxxxxx){color:rebeccapurple}}", }, { name: "@starting-style", source: 
"@starting-style{.class{}}", want: "@starting-style{.class:where(.astro-xxxxxx){}}", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // note: the "{}" is only added to make it valid CSS code := test_utils.Dedent("<style>\n" + tt.source + " \n</style>") doc, err := astro.Parse(strings.NewReader(code)) if err != nil { t.Error(err) } styleEl := doc.LastChild.FirstChild.FirstChild // note: root is <html>, and we need to get <style> which lives in head styles := []*astro.Node{styleEl} ScopeStyle(styles, TransformOptions{Scope: "xxxxxx"}) got := styles[0].FirstChild.Data if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } ================================================ FILE: internal/transform/scope-html.go ================================================ package transform import ( "fmt" "strings" astro "github.com/withastro/compiler/internal" "golang.org/x/net/html/atom" ) func ScopeElement(n *astro.Node, opts TransformOptions) { if n.Type == astro.ElementNode { if _, noScope := NeverScopedElements[n.Data]; !noScope { injectScopedClass(n, opts) } } } func AddDefineVars(n *astro.Node, values []string) bool { if n.Type == astro.ElementNode && !n.Component { if _, noScope := NeverScopedElements[n.Data]; !noScope { if !IsImplicitNode(n) { injectDefineVars(n, values) return true } } } return false } func AnnotateElement(n *astro.Node, opts TransformOptions) { if n.Type == astro.ElementNode && !n.Component && !n.Fragment { if _, noScope := NeverScopedElements[n.Data]; !noScope { annotateElement(n, opts) } } } var NeverScopedElements map[string]bool = map[string]bool{ "Fragment": true, "base": true, "font": true, "frame": true, "frameset": true, "head": true, "link": true, "meta": true, "noframes": true, "noscript": true, "script": true, "style": true, "slot": true, "title": true, } var NeverScopedSelectors map[string]bool = map[string]bool{ ":root": true, } func annotateElement(n *astro.Node, opts 
TransformOptions) { if n.DataAtom == atom.Html { return } n.Attr = append(n.Attr, astro.Attribute{ Key: "data-astro-source-file", Type: astro.QuotedAttribute, Val: opts.Filename, }) } func injectDefineVars(n *astro.Node, values []string) { definedVars := "$$definedVars" for i, attr := range n.Attr { if attr.Key == "style" { switch attr.Type { case astro.ShorthandAttribute: attr.Type = astro.ExpressionAttribute attr.Val = fmt.Sprintf("`${%s}; ${%s}`", attr.Key, definedVars) n.Attr[i] = attr return case astro.EmptyAttribute: attr.Type = astro.ExpressionAttribute attr.Val = definedVars n.Attr[i] = attr return case astro.QuotedAttribute: attr.Type = astro.ExpressionAttribute attr.Val = fmt.Sprintf("`${\"%s\"}; ${%s}`", attr.Val, definedVars) n.Attr[i] = attr return case astro.TemplateLiteralAttribute: attr.Type = astro.ExpressionAttribute attr.Val = fmt.Sprintf("`${`%s`}; ${%s}`", attr.Val, definedVars) n.Attr[i] = attr return case astro.ExpressionAttribute: attr.Type = astro.ExpressionAttribute trimmed := strings.TrimSpace(attr.Val) if trimmed[0] == '{' { attr.Val = fmt.Sprintf("[%s,%s]", trimmed, definedVars) } else { attr.Val = fmt.Sprintf("`${%s}; ${%s}`", attr.Val, definedVars) } n.Attr[i] = attr return } } } n.Attr = append(n.Attr, astro.Attribute{ Key: "style", Type: astro.ExpressionAttribute, Val: definedVars, }) } func injectScopedClass(n *astro.Node, opts TransformOptions) { hasSpreadAttr := false if opts.ScopedStyleStrategy != "attribute" { scopedClass := fmt.Sprintf(`astro-%s`, opts.Scope) for i, attr := range n.Attr { if !hasSpreadAttr && attr.Type == astro.SpreadAttribute { // We only handle this special case on built-in elements hasSpreadAttr = !n.Component } // If we find an existing class attribute, append the scoped class if attr.Key == "class" || (n.Component && attr.Key == "className") { switch attr.Type { case astro.ShorthandAttribute: if n.Component { attr.Val = fmt.Sprintf(`%s + " %s"`, attr.Key, scopedClass) attr.Type = astro.ExpressionAttribute 
n.Attr[i] = attr return } case astro.EmptyAttribute: // instead of an empty string attr.Type = astro.QuotedAttribute attr.Val = scopedClass n.Attr[i] = attr return case astro.QuotedAttribute, astro.TemplateLiteralAttribute: // as a plain string attr.Val = fmt.Sprintf(`%s %s`, attr.Val, scopedClass) n.Attr[i] = attr return case astro.ExpressionAttribute: // as an expression attr.Val = fmt.Sprintf(`((%s) ?? "") + " %s"`, attr.Val, scopedClass) n.Attr[i] = attr return } } if attr.Key == "class:list" { switch attr.Type { case astro.EmptyAttribute: // instead of an empty string attr.Type = astro.QuotedAttribute attr.Val = "astro-" + opts.Scope n.Attr[i] = attr return case astro.QuotedAttribute, astro.TemplateLiteralAttribute: // as a plain string attr.Val = attr.Val + " astro-" + opts.Scope n.Attr[i] = attr return case astro.ExpressionAttribute: // as an expression attr.Val = fmt.Sprintf(`[(%s), "%s"]`, attr.Val, scopedClass) n.Attr[i] = attr return } } } // If there's a spread attribute, `class` might be there, so do not inject `class` here // `class` will be injected by the `spreadAttributes` helper if hasSpreadAttr { return } // If we didn't find an existing class attribute, let's add one n.Attr = append(n.Attr, astro.Attribute{ Key: "class", Type: astro.QuotedAttribute, Val: scopedClass, }) } else { n.Attr = append(n.Attr, astro.Attribute{ Key: fmt.Sprintf(`data-astro-cid-%s`, opts.Scope), Type: astro.EmptyAttribute, }) } } ================================================ FILE: internal/transform/scope-html_test.go ================================================ package transform import ( "strings" "testing" "unicode/utf8" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "golang.org/x/net/html/atom" ) func tests() []struct { name string source string want string } { return []struct { name string source string want string }{ { name: "none", source: "<div />", want: `<div class="astro-xxxxxx"></div>`, }, { name: 
"quoted", source: `<div class="test" />`, want: `<div class="test astro-xxxxxx"></div>`, },
		{ name: "quoted no trim", source: `<div class="test " />`, want: `<div class="test astro-xxxxxx"></div>`, },
		{ name: "expression string", source: `<div class={"test"} />`, want: `<div class={(("test") ?? "") + " astro-xxxxxx"}></div>`, },
		{ name: "expression function", source: `<div class={clsx({ [test]: true })} />`, want: `<div class={((clsx({ [test]: true })) ?? "") + " astro-xxxxxx"}></div>`, },
		{ name: "expression dynamic", source: "<div class={condition ? 'a' : 'b'} />", want: `<div class={((condition ? 'a' : 'b') ?? "") + " astro-xxxxxx"}></div>`, },
		{ name: "empty", source: "<div class />", want: `<div class="astro-xxxxxx"></div>`, },
		{ name: "template literal", source: "<div class=`${value}` />", want: "<div class=`${value} astro-xxxxxx`></div>", },
		{ name: "component className not scoped", source: `<Component className="test" />`, want: `<Component className="test astro-xxxxxx"></Component>`, },
		{ name: "component className expression", source: `<Component className={"test"} />`, want: `<Component className={(("test") ?? "") + " astro-xxxxxx"}></Component>`, },
		{ name: "component className shorthand", source: "<Component {className} />", want: `<Component className={className + " astro-xxxxxx"}></Component>`, },
		{ name: "element class:list", source: "<div class:list={{ a: true }} />", want: `<div class:list={[({ a: true }), "astro-xxxxxx"]}></div>`, },
		{ name: "element class:list string", source: "<div class:list=\"weird but ok\" />", want: `<div class:list="weird but ok astro-xxxxxx"></div>`, },
		{ name: "component class:list", source: "<Component class:list={{ a: true }} />", want: `<Component class:list={[({ a: true }), "astro-xxxxxx"]}></Component>`, },
		{ name: "fault input currently accepted", source: `<A { 0>`, want: `<A 0>={0>} class="astro-xxxxxx"></A>`, },
	}
}

// TestScopeHTML parses each fixture as a body fragment, scopes the first node,
// and compares the printed source against the expected output. It then re-parses
// and re-scopes once more to check that a second pass does not error.
func TestScopeHTML(t *testing.T) {
	tests := tests()
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			h := handler.NewHandler(tt.source, "TestScopeHTML.astro")
			nodes, err := astro.ParseFragmentWithOptions(strings.NewReader(tt.source), &astro.Node{Type: astro.ElementNode, DataAtom: atom.Body, Data: atom.Body.String()}, astro.ParseOptionWithHandler(h))
			if err != nil {
				t.Error(err)
			}
			ScopeElement(nodes[0], TransformOptions{Scope: "xxxxxx"})
			var b strings.Builder
			astro.PrintToSource(&b, nodes[0])
			got := b.String()
			if tt.want != got {
				t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got)
			}
			// check whether another pass doesn't error
			nodes, err = astro.ParseFragmentWithOptions(strings.NewReader(tt.source), &astro.Node{Type: astro.ElementNode, DataAtom: atom.Body, Data: atom.Body.String()}, astro.ParseOptionWithHandler(h))
			if err != nil {
				t.Error(err)
			}
			ScopeElement(nodes[0], TransformOptions{Scope: "xxxxxx"})
			astro.PrintToSource(&b, nodes[0])
		})
	}
}

// FuzzScopeHTML fuzzes ScopeElement with arbitrary fragment sources, checking
// that scoping keeps the node an element, injects the scope marker, and never
// turns valid UTF-8 input into invalid output.
func FuzzScopeHTML(f *testing.F) {
	tests := tests()
	for _, tt := range tests {
		f.Add(tt.source) // Use f.Add to provide a seed corpus
	}
	f.Fuzz(func(t *testing.T, source string) {
		h := handler.NewHandler(source, "FuzzScopeHTML.astro")
		nodes, err :=
astro.ParseFragmentWithOptions(strings.NewReader(source), &astro.Node{Type: astro.ElementNode, DataAtom: atom.Body, Data: atom.Body.String()}, astro.ParseOptionWithHandler(h)) if err != nil { t.Error(err) } // if the doc doesn't parse as an element node, we don't care if len(nodes) == 0 || nodes[0].Type != astro.ElementNode { t.Skip(nodes) } ScopeElement(nodes[0], TransformOptions{Scope: "xxxxxx"}) // nodes[0] should still be an element node if len(nodes) == 0 || nodes[0].Type != astro.ElementNode { t.Errorf("`nodes[0]` is not an element node: %q\n nodes[0].Type: %q", source, nodes[0].Type) } var b strings.Builder astro.PrintToSource(&b, nodes[0]) got := b.String() if !strings.Contains(got, "astro-xxxxxx") { t.Errorf("HTML scoping failed to include the astro scope\n source: %q\n got: %q\n `nodes[0].Data: %q", source, got, nodes[0].Data) } if utf8.ValidString(source) && !utf8.ValidString(got) { t.Errorf("HTML scoping produced invalid html string: %q", got) } }) } ================================================ FILE: internal/transform/transform.go ================================================ package transform import ( "fmt" "path/filepath" "strings" "unicode" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" "github.com/withastro/compiler/internal/js_scanner" "github.com/withastro/compiler/internal/loc" a "golang.org/x/net/html/atom" ) const TRANSITION_ANIMATE = "transition:animate" const TRANSITION_NAME = "transition:name" const TRANSITION_PERSIST = "transition:persist" const DATA_ASTRO_RELOAD = "data-astro-reload" const TRANSITION_PERSIST_PROPS = "transition:persist-props" const SERVER_DEFER = "server:defer" type TransformOptions struct { Scope string Filename string NormalizedFilename string InternalURL string SourceMap string AstroGlobalArgs string ScopedStyleStrategy string Compact bool ResultScopedSlot bool TransitionsAnimationURL string ResolvePath func(string) string PreprocessStyle interface{} 
AnnotateSourceFile bool } func Transform(doc *astro.Node, opts TransformOptions, h *handler.Handler) *astro.Node { shouldScope := len(doc.Styles) > 0 && ScopeStyle(doc.Styles, opts) definedVars := GetDefineVars(doc.Styles) didAddDefinedVars := false i := 0 walk(doc, func(n *astro.Node) { i++ WarnAboutRerunOnExternalESMs(n, h) WarnAboutMisplacedReload(n, h) HintAboutImplicitInlineDirective(n, h) ExtractScript(doc, n, &opts, h) AddComponentProps(doc, n, &opts) if shouldScope { ScopeElement(n, opts) } if HasAttr(n, TRANSITION_ANIMATE) || HasAttr(n, TRANSITION_NAME) || HasAttr(n, TRANSITION_PERSIST) { doc.Transition = true doc.HeadPropagation = true getOrCreateTransitionScope(n, &opts, i) } if HasAttr(n, SERVER_DEFER) { doc.HeadPropagation = true } if len(definedVars) > 0 { didAdd := AddDefineVars(n, definedVars) if !didAddDefinedVars { didAddDefinedVars = didAdd } } mergeClassList(doc, n, &opts) if n.DataAtom == a.Head && !IsImplicitNode(n) { doc.ContainsHead = true } if opts.AnnotateSourceFile { AnnotateElement(n, opts) } }) if len(definedVars) > 0 && !didAddDefinedVars { for _, style := range doc.Styles { for _, a := range style.Attr { if a.Key == "define:vars" { h.AppendWarning(&loc.ErrorWithRange{ Code: loc.WARNING_CANNOT_DEFINE_VARS, Text: "Unable to inject `define:vars` declaration", Range: loc.Range{Loc: a.KeyLoc, Len: len("define:vars")}, Hint: "Try wrapping this component in an element so that Astro can inject a \"style\" attribute.", }) } } } } NormalizeSetDirectives(doc, h) // If we've emptied out all the nodes, this was a Fragment that only contained hoisted elements // Add an empty FrontmatterNode to allow the empty component to be printed if doc.FirstChild == nil { empty := &astro.Node{ Type: astro.FrontmatterNode, } empty.AppendChild(&astro.Node{ Type: astro.TextNode, Data: "", }) doc.AppendChild(empty) } TrimTrailingSpace(doc) if opts.Compact { collapseWhitespace(doc) } return doc } func ExtractStyles(doc *astro.Node, opts *TransformOptions) { 
walk(doc, func(n *astro.Node) { if n.Type == astro.ElementNode && n.DataAtom == a.Style { if HasSetDirective(n) || HasInlineDirective(n) { return } // Ignore styles in svg/noscript/etc if !IsHoistable(n, false) { return } // append node to maintain authored order doc.Styles = append(doc.Styles, n) } }) // Important! Remove styles from original location *after* walking the doc for _, style := range doc.Styles { removeNodeWithTrailingWhitespace(style) } } // removeNodeWithTrailingWhitespace removes a node and also removes any preceding // whitespace-only text node if it would become trailing whitespace after removal. // This prevents orphaned whitespace when extracting style/script tags. func removeNodeWithTrailingWhitespace(n *astro.Node) { prev := n.PrevSibling next := n.NextSibling // Check if we should remove preceding whitespace: // 1. There is a previous sibling // 2. It's a whitespace-only text node // 3. After removing n, this whitespace would be trailing (no more non-whitespace siblings after) if prev != nil && prev.Type == astro.TextNode && strings.TrimSpace(prev.Data) == "" { // Check if there are any non-whitespace siblings after n hasNonWhitespaceAfter := false for sib := next; sib != nil; sib = sib.NextSibling { if sib.Type != astro.TextNode || strings.TrimSpace(sib.Data) != "" { hasNonWhitespaceAfter = true break } } // If no non-whitespace content follows, remove the preceding whitespace if !hasNonWhitespaceAfter { prev.Parent.RemoveChild(prev) } } n.Parent.RemoveChild(n) } func NormalizeSetDirectives(doc *astro.Node, h *handler.Handler) { var nodes []*astro.Node var directives []*astro.Attribute walk(doc, func(n *astro.Node) { if n.Type == astro.ElementNode && HasSetDirective(n) { for _, attr := range n.Attr { if attr.Key == "set:html" || attr.Key == "set:text" { nodes = append(nodes, n) directives = append(directives, &attr) return } } } }) if len(nodes) > 0 { for i, n := range nodes { directive := directives[i] n.RemoveAttribute(directive.Key) var 
nodeToAppend *astro.Node var shouldWrapInQuotes, isTemplateLiteralAttribute, isQuotedAttribute, isExpressionAttribute, shouldWrapInTemplateLiteral, shouldAddExpression bool switch directive.Type { case astro.QuotedAttribute: isQuotedAttribute = true case astro.TemplateLiteralAttribute: isTemplateLiteralAttribute = true case astro.ExpressionAttribute: isExpressionAttribute = true } if directive.Key == "set:html" && isQuotedAttribute { shouldWrapInQuotes = true } if isTemplateLiteralAttribute { shouldWrapInTemplateLiteral = true } if directive.Key == "set:html" || (directive.Key == "set:text" && isTemplateLiteralAttribute) || isExpressionAttribute { shouldAddExpression = true } l := make([]loc.Loc, 1) l = append(l, directive.ValLoc) data := directive.Val if shouldWrapInQuotes { data = fmt.Sprintf("\"%s\"", data) } if shouldWrapInTemplateLiteral { data = fmt.Sprintf("`%s`", data) } if directive.Key == "set:html" && isExpressionAttribute { data = fmt.Sprintf("$$unescapeHTML(%s)", data) } if shouldAddExpression { nodeToAppend = &astro.Node{ Type: astro.ElementNode, Data: "astro:expression", Expression: true, } nodeToAppend.AppendChild(&astro.Node{ Type: astro.TextNode, Data: data, Loc: l, }) } else { nodeToAppend = &astro.Node{ Type: astro.TextNode, Data: data, Loc: l, } } shouldWarn := false // Remove all existing children for c := n.FirstChild; c != nil; c = c.NextSibling { if !shouldWarn { shouldWarn = c.Type == astro.CommentNode || (c.Type == astro.TextNode && len(strings.TrimSpace(c.Data)) != 0) } n.RemoveChild(c) } if shouldWarn { h.AppendWarning(&loc.ErrorWithRange{ Code: loc.WARNING_SET_WITH_CHILDREN, Text: fmt.Sprintf("%s directive will overwrite child nodes.", directive.Key), Range: loc.Range{Loc: directive.KeyLoc, Len: len(directive.Key)}, Hint: "Remove the child nodes to suppress this warning.", }) } n.AppendChild(nodeToAppend) } } } func TrimTrailingSpace(doc *astro.Node) { if doc.LastChild == nil { return } if doc.LastChild.Type == astro.TextNode { 
doc.LastChild.Data = strings.TrimRightFunc(doc.LastChild.Data, unicode.IsSpace) return } n := doc.LastChild for i := 0; i < 2; i++ { // Loop through implicit nodes to find final trailing text node (html > body > #text) if n != nil && n.Type == astro.ElementNode && IsImplicitNode(n) { n = n.LastChild continue } else { n = nil break } } // Collapse all trailing text nodes for n != nil && n.Type == astro.TextNode { n.Data = strings.TrimRightFunc(n.Data, unicode.IsSpace) n = n.PrevSibling } } func isRawElement(n *astro.Node) bool { if n.Type == astro.FrontmatterNode { return true } for _, attr := range n.Attr { if attr.Key == "is:raw" { return true } } rawTags := []string{"pre", "listing", "iframe", "noembed", "noframes", "math", "plaintext", "script", "style", "textarea", "title", "xmp"} for _, tag := range rawTags { if n.Data == tag { return true } } return false } func isWhitespaceInsensitiveElement(n *astro.Node) bool { return n.Data == "head" } func collapseWhitespace(doc *astro.Node) { walk(doc, func(n *astro.Node) { if n.Type == astro.TextNode { // Don't trim any whitespace if the node or any of its ancestors is raw if n.Closest(isRawElement) != nil { return } // Trim the whitespace on each end of top-level expressions if n.Parent != nil && n.Parent.Expression { // Trim left whitespace in the first child if n.PrevSibling == nil { n.Data = strings.TrimLeftFunc(n.Data, unicode.IsSpace) } // Trim right whitespace in the last child if n.NextSibling == nil { n.Data = strings.TrimRightFunc(n.Data, unicode.IsSpace) } // Don't trim any more! 
return } // If the node is only whitespace, clear it if len(strings.TrimFunc(n.Data, unicode.IsSpace)) == 0 { // If it's a lone text node, or if it's within a whitespace-insensitive element, clear completely if (n.PrevSibling == nil && n.NextSibling == nil) || n.Closest(isWhitespaceInsensitiveElement) != nil { n.Data = "" } else { n.Data = " " } return } // Collapse left whitespace into a single space originalLen := len(n.Data) hasNewline := false n.Data = strings.TrimLeftFunc(n.Data, func(r rune) bool { if r == '\n' { hasNewline = true } return unicode.IsSpace(r) }) if originalLen != len(n.Data) { if hasNewline { n.Data = "\n" + n.Data } else { n.Data = " " + n.Data } } // Collapse right whitespace into a single space originalLen = len(n.Data) hasNewline = false n.Data = strings.TrimRightFunc(n.Data, func(r rune) bool { if r == '\n' { hasNewline = true } return unicode.IsSpace(r) }) if originalLen != len(n.Data) { if hasNewline { n.Data = n.Data + "\n" } else { n.Data = n.Data + " " } } } }) } func WarnAboutMisplacedReload(n *astro.Node, h *handler.Handler) { if HasAttr(n, DATA_ASTRO_RELOAD) { attr := &n.Attr[AttrIndex(n, DATA_ASTRO_RELOAD)] /* * When set on <a>, <form> or <area>, * the data-astro-reload attribute replaces view transitions between pages with a full page loads. */ if n.Type != astro.ElementNode || n.Data != "a" && n.Data != "area" && n.Data != "form" { h.AppendWarning(&loc.ErrorWithRange{ Code: loc.WARNING, Text: "The data-astro-reload attribute is only supported on <a>, <form> and <area> elements.", Range: loc.Range{Loc: attr.KeyLoc, Len: len(attr.Key)}, }) } } } func WarnAboutRerunOnExternalESMs(n *astro.Node, h *handler.Handler) { if n.Data == "script" && HasAttr(n, "src") && HasAttr(n, "type") && HasAttr(n, "data-astro-rerun") { /* * The browser caches external ECMAScript Modules. Even if such a script is included several times on a page, * it will only run once. This means that the data-astro-rerun attribute will not have any effect. 
*/
		src := &n.Attr[AttrIndex(n, "src")]
		typ := &n.Attr[AttrIndex(n, "type")]
		rerun := &n.Attr[AttrIndex(n, "data-astro-rerun")]
		if typ.Val == "module" && src.Val != "" {
			h.AppendWarning(&loc.ErrorWithRange{
				Code:  loc.WARNING_CANNOT_RERUN,
				Text:  "The data-astro-rerun attribute is not supported on an external module <script>",
				Hint:  "Two out of three is OK: type=\"module\", src=\"...\", or data-astro-rerun",
				Range: loc.Range{Loc: rerun.KeyLoc, Len: len(rerun.Key)},
			})
		}
	}
}

// ExtractScript hoists eligible <script> elements (bare, or with only a
// string-literal src) into doc.Scripts, warning on deprecated `hoist`,
// expression src attributes, and unnecessary client: directives.
func ExtractScript(doc *astro.Node, n *astro.Node, opts *TransformOptions, h *handler.Handler) {
	if n.Type == astro.ElementNode && n.DataAtom == a.Script {
		if HasSetDirective(n) || HasInlineDirective(n) {
			return
		}
		// Ignore scripts in svg/noscript/etc
		if !IsHoistable(n, true) {
			return
		}
		// if <script>, hoist to the document root
		// If also using define:vars, that overrides the hoist tag.
		if (hasTruthyAttr(n, "hoist")) || len(n.Attr) == 0 || (len(n.Attr) == 1 && n.Attr[0].Key == "src") {
			shouldAdd := true
			for _, attr := range n.Attr {
				if attr.Key == "hoist" {
					h.AppendWarning(&loc.ErrorWithRange{
						Code:  loc.WARNING_DEPRECATED_DIRECTIVE,
						Text:  "<script hoist> is no longer needed. You may remove the `hoist` attribute.",
						Range: loc.Range{Loc: n.Loc[0], Len: len(n.Data)},
					})
				}
				if attr.Key == "src" {
					if attr.Type == astro.ExpressionAttribute {
						shouldAdd = false
						h.AppendWarning(&loc.ErrorWithRange{
							Code:  loc.WARNING_UNSUPPORTED_EXPRESSION,
							Text:  "<script> uses an expression for the src attribute and will be ignored.",
							Hint:  fmt.Sprintf("Replace src={%s} with a string literal", attr.Val),
							Range: loc.Range{Loc: n.Loc[0], Len: len(n.Data)},
						})
						break
					}
				}
			}
			// append node to maintain authored order
			if shouldAdd {
				doc.Scripts = append(doc.Scripts, n)
				n.HandledScript = true
			}
		} else {
			for _, attr := range n.Attr {
				if strings.HasPrefix(attr.Key, "client:") {
					h.AppendWarning(&loc.ErrorWithRange{
						Code:  loc.WARNING_IGNORED_DIRECTIVE,
						Text:  fmt.Sprintf("<script> does not need the %s directive and is always added as a module script.", attr.Key),
						Range: loc.Range{Loc: n.Loc[0], Len: len(n.Data)},
					})
				}
			}
		}
	}
}

// HintAboutImplicitInlineDirective emits a hint when a <script> with
// attributes will be treated as is:inline implicitly (src-only scripts exempt).
func HintAboutImplicitInlineDirective(n *astro.Node, h *handler.Handler) {
	if n.Type == astro.ElementNode && n.DataAtom == a.Script && len(n.Attr) > 0 && !HasInlineDirective(n) {
		if len(n.Attr) == 1 && n.Attr[0].Key == "src" {
			return
		}
		h.AppendHint(&loc.ErrorWithRange{
			Code:  loc.HINT,
			Text:  "This script will be treated as if it has the `is:inline` directive because it contains an attribute. Therefore, features that require processing (e.g. using TypeScript or npm packages in the script) are unavailable.\n\nSee docs for more details: https://docs.astro.build/en/guides/client-side-scripts/#script-processing.\n\nAdd the `is:inline` directive explicitly to silence this hint.",
			Range: loc.Range{Loc: n.Attr[0].KeyLoc, Len: len(n.Attr[0].Key)},
		})
	}
}

// AddComponentProps records hydration (client:*) and server (server:*)
// directives on component/custom-element nodes, matching each node to its
// import statement so the component path and export can be attached.
func AddComponentProps(doc *astro.Node, n *astro.Node, opts *TransformOptions) {
	if n.Type == astro.ElementNode && (n.Component || n.CustomElement) {
		for _, attr := range n.Attr {
			if strings.HasPrefix(attr.Key, "client:") {
				parts := strings.Split(attr.Key, ":")
				directive := parts[1]
				// Add the hydration directive so it can be extracted statically.
				doc.HydrationDirectives[directive] = true
				hydrationAttr := astro.Attribute{
					Key: "client:component-hydration",
					Val: directive,
				}
				n.Attr = append(n.Attr, hydrationAttr)
				if attr.Key == "client:only" {
					doc.ClientOnlyComponentNodes = append([]*astro.Node{n}, doc.ClientOnlyComponentNodes...)
					match := matchNodeToImportStatement(doc, n)
					if match != nil {
						doc.ClientOnlyComponents = append(doc.ClientOnlyComponents, &astro.HydratedComponentMetadata{
							ExportName:   match.ExportName,
							Specifier:    match.Specifier,
							ResolvedPath: ResolveIdForMatch(match.Specifier, opts),
						})
					}
					break
				}
				// prepend node to maintain authored order
				doc.HydratedComponentNodes = append([]*astro.Node{n}, doc.HydratedComponentNodes...)
match := matchNodeToImportStatement(doc, n)
			// Hydrated (non client:only) component: record its metadata and attach
			// the resolved path/export as expression attributes for the printer.
			if match != nil {
				doc.HydratedComponents = append(doc.HydratedComponents, &astro.HydratedComponentMetadata{
					ExportName:   match.ExportName,
					Specifier:    match.Specifier,
					ResolvedPath: ResolveIdForMatch(match.Specifier, opts),
				})
				pathAttr := astro.Attribute{
					Key:  "client:component-path",
					Val:  fmt.Sprintf(`"%s"`, ResolveIdForMatch(match.Specifier, opts)),
					Type: astro.ExpressionAttribute,
				}
				n.Attr = append(n.Attr, pathAttr)
				exportAttr := astro.Attribute{
					Key:  "client:component-export",
					Val:  fmt.Sprintf(`"%s"`, match.ExportName),
					Type: astro.ExpressionAttribute,
				}
				n.Attr = append(n.Attr, exportAttr)
			}
			// Only the first client: directive on a node is processed.
			break
		} else if strings.HasPrefix(attr.Key, "server:") {
			// server: directive (e.g. server:defer): mirrors the client handling
			// above but records the component under doc.ServerComponents.
			parts := strings.Split(attr.Key, ":")
			directive := parts[1]
			hydrationAttr := astro.Attribute{
				Key: "server:component-directive",
				Val: directive,
			}
			n.Attr = append(n.Attr, hydrationAttr)
			match := matchNodeToImportStatement(doc, n)
			if match != nil {
				doc.ServerComponents = append(doc.ServerComponents, &astro.HydratedComponentMetadata{
					ExportName:   match.ExportName,
					LocalName:    n.Data,
					Specifier:    match.Specifier,
					ResolvedPath: ResolveIdForMatch(match.Specifier, opts),
				})
				pathAttr := astro.Attribute{
					Key:  "server:component-path",
					Val:  fmt.Sprintf(`"%s"`, ResolveIdForMatch(match.Specifier, opts)),
					Type: astro.ExpressionAttribute,
				}
				n.Attr = append(n.Attr, pathAttr)
				exportAttr := astro.Attribute{
					Key:  "server:component-export",
					Val:  fmt.Sprintf(`"%s"`, match.ExportName),
					Type: astro.ExpressionAttribute,
				}
				n.Attr = append(n.Attr, exportAttr)
			}
		}
	}
}
}

// ImportMatch pairs the export name a component resolves to with the
// specifier of the frontmatter import statement that provides it.
type ImportMatch struct {
	ExportName string
	Specifier  string
}

// matchNodeToImportStatement scans the frontmatter import statements and
// returns the first import whose exported name matches the component node's
// tag name (n.Data), or nil when no import matches.
func matchNodeToImportStatement(doc *astro.Node, n *astro.Node) *ImportMatch {
	var match *ImportMatch

	eachImportStatement(doc, func(stmt js_scanner.ImportStatement) bool {
		for _, imported := range stmt.Imports {
			exportName, isUsed := js_scanner.ExtractComponentExportName(n.Data, imported)
			if isUsed {
				match = &ImportMatch{
					ExportName: exportName,
					Specifier:  stmt.Specifier,
				}
return false } } return true }) return match } func ResolveIdForMatch(id string, opts *TransformOptions) string { // Try custom resolvePath if provided if opts.ResolvePath != nil { return opts.ResolvePath(id) } else if opts.Filename != "<stdin>" && id[0] == '.' { return filepath.Join(filepath.Dir(opts.Filename), id) } else { return id } } func eachImportStatement(doc *astro.Node, cb func(stmt js_scanner.ImportStatement) bool) { if doc.FirstChild.Type == astro.FrontmatterNode && doc.FirstChild.FirstChild != nil { source := []byte(doc.FirstChild.FirstChild.Data) loc, statement := js_scanner.NextImportStatement(source, 0) for loc != -1 { if !cb(statement) { break } loc, statement = js_scanner.NextImportStatement(source, loc) } } } func walk(doc *astro.Node, cb func(*astro.Node)) { var f func(*astro.Node) f = func(n *astro.Node) { cb(n) for c := n.FirstChild; c != nil; c = c.NextSibling { f(c) } } f(doc) } // This function merges the values of `class=""` and `class:list=""` in `class:list` func mergeClassList(doc *astro.Node, n *astro.Node, opts *TransformOptions) { var classListAttrValue string var classListAttrIndex int = -1 var classAttrType astro.AttributeType var classAttrValue string var classAttrIndex int = -1 for i, attr := range n.Attr { if attr.Key == "class:list" { classListAttrValue = attr.Val classListAttrIndex = i } if attr.Key == "class" { classAttrType = attr.Type classAttrValue = attr.Val classAttrIndex = i } } // Check if both `class:list` and `class` attributes are present if classListAttrIndex >= 0 && classAttrIndex >= 0 { // Merge the `class` attribute value into `class:list` if classAttrType == astro.ExpressionAttribute { // If the `class` attribute is an expression, include it directly without surrounding quotes. // This respects the fact that expressions are evaluated dynamically and should not be treated as strings. 
n.Attr[classListAttrIndex].Val = fmt.Sprintf("[%s, %s]", classAttrValue, classListAttrValue) } else { // If the `class` attribute is a static string, wrap it in quotes. // This ensures that static class names are treated as string values within the list. n.Attr[classListAttrIndex].Val = fmt.Sprintf("['%s', %s]", classAttrValue, classListAttrValue) } // Now that the value of `class` is carried by `class:list`, we can remove the `class` node from the AST. // Doing so will allow us to generate valid HTML at runtime n.Attr = remove(n.Attr, classAttrIndex) } } func remove(slice []astro.Attribute, s int) []astro.Attribute { return append(slice[:s], slice[s+1:]...) } func getOrCreateTransitionScope(n *astro.Node, opts *TransformOptions, i int) string { if n.TransitionScope != "" { return n.TransitionScope } n.TransitionScope = astro.HashString(fmt.Sprintf("%s-%v", opts.Scope, i)) return n.TransitionScope } ================================================ FILE: internal/transform/transform_test.go ================================================ package transform import ( "regexp" "strings" "testing" "unicode/utf8" astro "github.com/withastro/compiler/internal" "github.com/withastro/compiler/internal/handler" ) func transformScopingFixtures() []struct { name string source string want string scopeStyle string // "attribute" | "class" | "where" } { return []struct { name string source string want string scopeStyle string }{ { name: "basic", source: ` <style>div { color: red }</style> <div /> `, want: `<div class="astro-xxxxxx"></div>`, }, { name: "global empty", source: ` <style is:global>div { color: red }</style> <div /> `, want: `<div></div>`, }, { name: "global true", source: ` <style is:global={true}>div { color: red }</style> <div /> `, want: `<div></div>`, }, { name: "global string", source: ` <style is:global="">div { color: red }</style> <div /> `, want: `<div></div>`, }, { name: "global string true", source: ` <style is:global="true">div { color: red }</style> <div 
/> `, want: `<div></div>`, }, { name: "empty (space)", source: ` <style> </style> <div /> `, want: `<div></div>`, }, { name: "empty (nil)", source: ` <style></style> <div /> `, want: `<div></div>`, }, { name: "empty (define:vars)", source: ` <style define:vars={{ a }}></style> <div /> `, want: `<div class="astro-xxxxxx" style={$$definedVars}></div>`, }, { name: "scoped multiple", source: ` <style>div { color: red }</style> <style>div { color: green }</style> <div /> `, want: `<div class="astro-xxxxxx"></div>`, }, { name: "global multiple", source: ` <style is:global>div { color: red }</style> <style is:global>div { color: green }</style> <div /> `, want: `<div></div>`, }, { name: "mixed multiple", source: ` <style>div { color: red }</style> <style is:global>div { color: green }</style> <div /> `, want: `<div class="astro-xxxxxx"></div>`, }, { name: "multiple scoped :global", source: ` <style>:global(test-2) {}</style> <style>:global(test-1) {}</style> <div /> `, want: `<div class="astro-xxxxxx"></div>`, }, { name: "inline does not scope", source: ` <style is:inline>div{}</style> <div /> `, want: `<div></div>`, }, { name: "attribute -> creates a new data attribute", source: ` <style>.class{}</style> <div /> `, want: `<div data-astro-cid-xxxxxx></div>`, scopeStyle: "attribute", }, { name: "attribute -> creates data attribute when there's a class", source: ` <style>.font{}</style> <div /> `, want: `<div data-astro-cid-xxxxxx></div>`, scopeStyle: "attribute", }, { name: "attribute -> creates data attribute when there's a CSS class", source: ` <style>.font{}</style> <div /> `, want: `<div data-astro-cid-xxxxxx></div>`, scopeStyle: "attribute", }, { name: "attribute -> creates data attribute when there's already a class attribute", source: ` <style>.font{}</style> <div class="foo" /> `, want: `<div class="foo" data-astro-cid-xxxxxx></div>`, scopeStyle: "attribute", }, } } func TestTransformScoping(t *testing.T) { tests := transformScopingFixtures() var b strings.Builder 
for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } var scopeStyle string if tt.scopeStyle == "attribute" { scopeStyle = "attribute" } else if tt.scopeStyle == "class" { scopeStyle = "class" } else { scopeStyle = "where" } transformOptions := TransformOptions{Scope: "xxxxxx", ScopedStyleStrategy: scopeStyle} ExtractStyles(doc, &transformOptions) Transform(doc, transformOptions, handler.NewHandler(tt.source, "/test.astro")) astro.PrintToSource(&b, doc.LastChild.FirstChild.NextSibling.FirstChild) got := b.String() if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } func FuzzTransformScoping(f *testing.F) { tests := transformScopingFixtures() for _, tt := range tests { f.Add(tt.source) // Use f.Add to provide a seed corpus } f.Fuzz(func(t *testing.T, source string) { doc, err := astro.Parse(strings.NewReader(source)) if err != nil { t.Skip("Invalid parse, skipping rest of fuzz test") } transformOptions := TransformOptions{Scope: "xxxxxx"} ExtractStyles(doc, &transformOptions) Transform(doc, transformOptions, handler.NewHandler(source, "/test.astro")) var b strings.Builder astro.PrintToSource(&b, doc.LastChild.FirstChild.NextSibling.FirstChild) got := b.String() // hacky - we only expect scoping for non global styles / non inline styles testRegex := regexp.MustCompile(`is:global|:global\(|is:inline|<style>\s*</style>`) if !testRegex.MatchString(source) && !strings.Contains(got, "astro-xxxxxx") { t.Errorf("HTML scoping failed to include the astro scope\n source: %q\n got: %q", source, got) } if utf8.ValidString(source) && !utf8.ValidString(got) { t.Errorf("HTML scoping produced invalid html string: %q", got) } }) } func TestFullTransform(t *testing.T) { tests := []struct { name string source string want string }{ { name: "top-level component with leading style", source: `<style>:root{}</style><Component><h1>Hello 
world</h1></Component>`, want: `<Component><h1>Hello world</h1></Component>`, }, { name: "top-level component with leading style body", source: `<style>:root{}</style><Component><div><h1>Hello world</h1></div></Component>`, want: `<Component><div><h1>Hello world</h1></div></Component>`, }, { name: "top-level component with trailing style", source: `<Component><h1>Hello world</h1></Component><style>:root{}</style>`, want: `<Component><h1>Hello world</h1></Component>`, }, { name: "Component before html I", source: `<Navigation /><html><body><h1>Astro</h1></body></html>`, want: `<Navigation></Navigation><h1>Astro</h1>`, }, { name: "Component before html II", source: `<MainHead title={title} description={description} /><html lang="en"><body><slot /></body></html>`, want: `<MainHead title={title} description={description}></MainHead><slot></slot>`, }, { name: "respects explicitly authored elements", source: `<html><Component /></html>`, want: `<html><Component></Component></html>`, }, { name: "respects explicitly authored elements 2", source: `<head></head><Component />`, want: `<head></head><Component></Component>`, }, { name: "respects explicitly authored elements 3", source: `<body><Component /></body>`, want: `<body><Component></Component></body>`, }, { name: "removes implicitly generated elements", source: `<Component />`, want: `<Component></Component>`, }, { name: "works with nested components", source: `<style></style><A><div><B /></div></A>`, want: `<A><div><B></B></div></A>`, }, { name: "does not remove trailing siblings", source: `<title>Title</title><span /><Component /><span />`, want: `<title>Title</title><span></span><Component></Component><span></span>`, }, } var b strings.Builder for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } transformOptions := TransformOptions{} ExtractStyles(doc, &transformOptions) // Clear doc.Styles to avoid scoping 
behavior, we're not testing that here doc.Styles = make([]*astro.Node, 0) Transform(doc, transformOptions, handler.NewHandler(tt.source, "/test.astro")) astro.PrintToSource(&b, doc) got := strings.TrimSpace(b.String()) if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } func TestTransformTransitionAndHeadPropagationFlags(t *testing.T) { tests := []struct { name string source string wantTransition bool wantHeadPropagation bool }{ { name: "server:defer only", source: `<Component server:defer />`, wantTransition: false, wantHeadPropagation: true, }, { name: "transition directive", source: `<div transition:animate="slide"></div>`, wantTransition: true, wantHeadPropagation: true, }, { name: "transition:persist-props alone does not count as transition directive", source: `<Component transition:persist-props="true" />`, wantTransition: false, wantHeadPropagation: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Fatal(err) } transformOptions := TransformOptions{} ExtractStyles(doc, &transformOptions) Transform(doc, transformOptions, handler.NewHandler(tt.source, "/test.astro")) if doc.Transition != tt.wantTransition { t.Fatalf("unexpected doc.Transition value: want %v, got %v", tt.wantTransition, doc.Transition) } if doc.HeadPropagation != tt.wantHeadPropagation { t.Fatalf("unexpected doc.HeadPropagation value: want %v, got %v", tt.wantHeadPropagation, doc.HeadPropagation) } }) } } func TestTransformTrailingSpace(t *testing.T) { tests := []struct { name string source string want string }{ { name: "component with trailing space", source: "<h1>Hello world</h1>\n\n\t ", want: `<h1>Hello world</h1>`, }, { name: "component with no trailing space", source: "<h1>Hello world</h1>", want: "<h1>Hello world</h1>", }, { name: "component with leading and trailing space", source: "<span/>\n\n\t <h1>Hello world</h1>\n\n\t ", want: 
"<span></span>\n\n\t <h1>Hello world</h1>", }, { name: "html with explicit space", source: "<html><body>\n\n\n</body></html>", want: "<html><body>\n\n\n</body></html>", }, { name: "trailing whitespace before style is removed", source: "<html><head></head><body><slot />\n<style>div { color: red; }</style></body></html>", want: "<html><head></head><body><slot></slot></body></html>", }, } var b strings.Builder for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } transformOptions := TransformOptions{} ExtractStyles(doc, &transformOptions) // Clear doc.Styles to avoid scoping behavior, we're not testing that here doc.Styles = make([]*astro.Node, 0) Transform(doc, transformOptions, handler.NewHandler(tt.source, "/test.astro")) astro.PrintToSource(&b, doc) got := b.String() if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } func TestCompactTransform(t *testing.T) { tests := []struct { name string source string want string }{ { name: "trims whitespace", source: `<div> Test </div>`, want: `<div> Test </div>`, }, { name: "pre", source: `<pre> Test </pre>`, want: `<pre> Test </pre>`, }, { name: "textarea", source: `<textarea> Test </textarea>`, want: `<textarea> Test </textarea>`, }, { name: "deep pre", source: `<pre> <div> Test </div> </pre>`, want: `<pre> <div> Test </div> </pre>`, }, { name: "remove whitespace only", source: `<head> <script>console.log("test")</script> <head>`, want: `<head><script>console.log("test")</script></head>`, }, { name: "collapse surrounding whitespace", source: `<div> COOL </div>`, want: `<div> COOL </div>`, }, { name: "collapse only surrounding whitespace", source: `<div> C O O L </div>`, want: `<div> C O O L </div>`, }, { name: "collapse surrounding newlines", source: "<div>\n\n\tC O O L\n\n\t</div>", want: "<div>\nC O O L\n</div>", }, { name: "collapse in-between inline elements", source: 
"<div>Click <a>here</a> <span>space</span></div>", want: "<div>Click <a>here</a> <span>space</span></div>", }, { name: "expression trim first", source: "<div>{\n() => {\n\t\treturn <span />}}</div>", want: "<div>{() => {\n\t\treturn <span></span>}}</div>", }, { name: "expression trim last", source: "<div>{() => {\n\t\treturn <span />}\n}</div>", want: "<div>{() => {\n\t\treturn <span></span>}}</div>", }, { name: "expression collapse inside", source: "<div>{() => {\n\t\treturn <span> HEY </span>}}</div>", want: "<div>{() => {\n\t\treturn <span> HEY </span>}}</div>", }, { name: "expression collapse newlines", source: "<div>{() => {\n\t\treturn <span>\n\nTEST</span>}}</div>", want: "<div>{() => {\n\t\treturn <span>\nTEST</span>}}</div>", }, { name: "expression remove only whitespace", source: "<div>{() => {\n\t\treturn <span>\n\n\n</span>}}</div>", want: "<div>{() => {\n\t\treturn <span></span>}}</div>", }, { name: "attributes", source: `<div a="1" b={0} />`, want: `<div a="1" b={0}></div>`, }, { name: "expression quoted", source: "<div test={\n` test `\n} />", want: "<div test={` test `}></div>", }, { name: "expression attribute math", source: "<div test={ a + b } />", want: "<div test={a + b}></div>", }, { name: "expression math", source: "<div>{ a + b }</div>", want: "<div>{a + b}</div>", }, } var b strings.Builder for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } transformOptions := TransformOptions{ Compact: true, } ExtractStyles(doc, &transformOptions) // Clear doc.Styles to avoid scoping behavior, we're not testing that here doc.Styles = make([]*astro.Node, 0) Transform(doc, transformOptions, &handler.Handler{}) astro.PrintToSource(&b, doc) got := strings.TrimSpace(b.String()) if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } func TestAnnotation(t *testing.T) { tests := []struct { name string source string 
want string }{ { name: "basic", source: `<div>Hello world!</div>`, want: `<div data-astro-source-file="/src/pages/index.astro">Hello world!</div>`, }, { name: "no components", source: `<Component>Hello world!</Component>`, want: `<Component>Hello world!</Component>`, }, { name: "injects root", source: `<html></html>`, want: `<html></html>`, }, } var b strings.Builder for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } h := handler.NewHandler(tt.source, "/src/pages/index.astro") Transform(doc, TransformOptions{ AnnotateSourceFile: true, Filename: "/src/pages/index.astro", NormalizedFilename: "/src/pages/index.astro", }, h) astro.PrintToSource(&b, doc) got := strings.TrimSpace(b.String()) if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } func TestClassAndClassListMerging(t *testing.T) { tests := []struct { name string source string want string }{ { name: "Single class attribute", source: `<div class="astro-xxxxxx" />`, want: `<div class="astro-xxxxxx"></div>`, }, { name: "Class attribute with parameter", source: "<div class={`astro-xxxxxx ${astro}`} />", want: "<div class={`astro-xxxxxx ${astro}`}></div>", }, { name: "Single class:list attribute", source: `<div class:list={"astro-xxxxxx"} />`, want: `<div class:list={"astro-xxxxxx"}></div>`, }, { name: "Merge class with empty class:list (double quotes)", source: `<div class="astro-xxxxxx" class:list={} />`, want: `<div class:list={['astro-xxxxxx', ]}></div>`, }, { name: "Merge class with empty class:list (single quotes)", source: `<div class='astro-xxxxxx' class:list={} />`, want: `<div class:list={['astro-xxxxxx', ]}></div>`, }, { name: "Merge class and class:list attributes (string)", source: `<div class="astro-xxxxxx" class:list={"astro-yyyyyy"} />`, want: `<div class:list={['astro-xxxxxx', "astro-yyyyyy"]}></div>`, }, { name: "Merge class and class:list attributes 
(expression)", source: `<div class={"astro-xxxxxx"} class:list={"astro-yyyyyy"} />`, want: `<div class:list={["astro-xxxxxx", "astro-yyyyyy"]}></div>`, }, { name: "Merge Class and Class List Attributes (concatenation)", source: `<div class={"astro-xxxxxx" + name} class:list={"astro-yyyyyy"} />`, want: `<div class:list={["astro-xxxxxx" + name, "astro-yyyyyy"]}></div>`, }, } var b strings.Builder for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b.Reset() doc, err := astro.Parse(strings.NewReader(tt.source)) if err != nil { t.Error(err) } Transform(doc, TransformOptions{}, handler.NewHandler(tt.source, "/test.astro")) astro.PrintToSource(&b, doc.LastChild.FirstChild.NextSibling.FirstChild) got := b.String() if tt.want != got { t.Errorf("\nFAIL: %s\n want: %s\n got: %s", tt.name, tt.want, got) } }) } } ================================================ FILE: internal/transform/utils.go ================================================ package transform import ( astro "github.com/withastro/compiler/internal" "golang.org/x/net/html/atom" ) func hasTruthyAttr(n *astro.Node, key string) bool { for _, attr := range n.Attr { if attr.Key == key && (attr.Type == astro.EmptyAttribute) || (attr.Type == astro.ExpressionAttribute && attr.Val == "true") || (attr.Type == astro.QuotedAttribute && (attr.Val == "" || attr.Val == "true")) { return true } } return false } func HasSetDirective(n *astro.Node) bool { return HasAttr(n, "set:html") || HasAttr(n, "set:text") } func HasInlineDirective(n *astro.Node) bool { return HasAttr(n, "is:inline") } func AttrIndex(n *astro.Node, key string) int { for i, attr := range n.Attr { if attr.Key == key { return i } } return -1 } func HasAttr(n *astro.Node, key string) bool { return AttrIndex(n, key) != -1 } func GetAttr(n *astro.Node, key string) *astro.Attribute { for _, attr := range n.Attr { if attr.Key == key { return &attr } } return nil } func IsHoistable(n *astro.Node, renderScript bool) bool { parent := n.Closest(func(p 
*astro.Node) bool { return p.DataAtom == atom.Svg || p.DataAtom == atom.Noscript || p.DataAtom == atom.Template }) if renderScript && parent != nil && parent.Expression { return true } return parent == nil } func IsImplicitNode(n *astro.Node) bool { return HasAttr(n, astro.ImplicitNodeMarker) } func IsImplicitNodeMarker(attr astro.Attribute) bool { return attr.Key == astro.ImplicitNodeMarker } func IsTopLevel(n *astro.Node) bool { if IsImplicitNode(n) || n.Data == "" { return false } p := n.Parent if p == nil { return true } if IsImplicitNode(p) || p.Data == "" { return true } if p.Component { return IsTopLevel(p) } return false } func GetQuotedAttr(n *astro.Node, key string) string { for _, attr := range n.Attr { if attr.Key == key { if attr.Type == astro.QuotedAttribute { return attr.Val } return "" } } return "" } ================================================ FILE: internal/xxhash/LICENSE.txt ================================================ Copyright (c) 2016 Caleb Spare MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.



================================================
FILE: internal/xxhash/xxhash.go
================================================
// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described
// at http://cyan4973.github.io/xxHash/.
package xxhash

import (
	"encoding/binary"
	"errors"
	"math/bits"
)

const (
	prime1 uint64 = 11400714785074694791
	prime2 uint64 = 14029467366897019727
	prime3 uint64 = 1609587929392839161
	prime4 uint64 = 9650029242287828579
	prime5 uint64 = 2870177450012600261
)

// NOTE(caleb): I'm using both consts and vars of the primes. Using consts where
// possible in the Go code is worth a small (but measurable) performance boost
// by avoiding some MOVQs. Vars are needed for the asm and also are useful for
// convenience in the Go code in a few places where we need to intentionally
// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the
// result overflows a uint64).
var (
	prime1v = prime1
	prime2v = prime2
	prime3v = prime3
	prime4v = prime4
	prime5v = prime5
)

// Digest implements hash.Hash64.
type Digest struct {
	v1    uint64 // the four lane accumulators, seeded in Reset
	v2    uint64
	v3    uint64
	v4    uint64
	total uint64   // total number of bytes written so far
	mem   [32]byte // buffer for a pending partial block
	n     int      // how much of mem is used
}

// New creates a new Digest that computes the 64-bit xxHash algorithm.
func New() *Digest {
	var d Digest
	d.Reset()
	return &d
}

// Reset clears the Digest's state so that it can be reused.
func (d *Digest) Reset() {
	d.v1 = prime1v + prime2
	d.v2 = prime2
	d.v3 = 0
	// Unsigned negation wraps; this is the standard XXH64 seed-0 lane init.
	d.v4 = -prime1v
	d.total = 0
	d.n = 0
}

// Size always returns 8 bytes.
func (d *Digest) Size() int { return 8 }

// BlockSize always returns 32 bytes.
func (d *Digest) BlockSize() int { return 32 }

// Write adds more data to d. It always returns len(b), nil.
func (d *Digest) Write(b []byte) (n int, err error) {
	n = len(b)
	d.total += uint64(n)

	if d.n+n < 32 {
		// This new data doesn't even fill the current block.
		copy(d.mem[d.n:], b)
		d.n += n
		return
	}

	if d.n > 0 {
		// Finish off the partial block.
		copy(d.mem[d.n:], b)
		d.v1 = round(d.v1, u64(d.mem[0:8]))
		d.v2 = round(d.v2, u64(d.mem[8:16]))
		d.v3 = round(d.v3, u64(d.mem[16:24]))
		d.v4 = round(d.v4, u64(d.mem[24:32]))
		b = b[32-d.n:]
		d.n = 0
	}

	if len(b) >= 32 {
		// One or more full blocks left.
		nw := writeBlocks(d, b)
		b = b[nw:]
	}

	// Store any remaining partial block.
	copy(d.mem[:], b)
	d.n = len(b)

	return
}

// Sum appends the current hash to b and returns the resulting slice.
func (d *Digest) Sum(b []byte) []byte {
	s := d.Sum64()
	// Big-endian byte order, matching the canonical XXH64 representation.
	return append(
		b,
		byte(s>>56),
		byte(s>>48),
		byte(s>>40),
		byte(s>>32),
		byte(s>>24),
		byte(s>>16),
		byte(s>>8),
		byte(s),
	)
}

// Sum64 returns the current hash.
func (d *Digest) Sum64() uint64 {
	var h uint64

	if d.total >= 32 {
		// At least one full block was processed: merge the four lanes.
		v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
		h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
		h = mergeRound(h, v1)
		h = mergeRound(h, v2)
		h = mergeRound(h, v3)
		h = mergeRound(h, v4)
	} else {
		// Small input: v3 still holds its seed (0 after Reset).
		h = d.v3 + prime5
	}

	h += d.total

	// Fold in the buffered tail: 8-byte words, then a 4-byte word, then
	// single bytes.
	i, end := 0, d.n
	for ; i+8 <= end; i += 8 {
		k1 := round(0, u64(d.mem[i:i+8]))
		h ^= k1
		h = rol27(h)*prime1 + prime4
	}
	if i+4 <= end {
		h ^= uint64(u32(d.mem[i:i+4])) * prime1
		h = rol23(h)*prime2 + prime3
		i += 4
	}
	for i < end {
		h ^= uint64(d.mem[i]) * prime5
		h = rol11(h) * prime1
		i++
	}

	// Final avalanche.
	h ^= h >> 33
	h *= prime2
	h ^= h >> 29
	h *= prime3
	h ^= h >> 32

	return h
}

const (
	magic         = "xxh\x06"
	marshaledSize = len(magic) + 8*5 + 32
)

// MarshalBinary implements the encoding.BinaryMarshaler interface.
func (d *Digest) MarshalBinary() ([]byte, error) {
	b := make([]byte, 0, marshaledSize)
	b = append(b, magic...)
	b = appendUint64(b, d.v1)
	b = appendUint64(b, d.v2)
	b = appendUint64(b, d.v3)
	b = appendUint64(b, d.v4)
	b = appendUint64(b, d.total)
	b = append(b, d.mem[:d.n]...)
	// Pad out to the fixed marshaled size (unused mem bytes are zero).
	b = b[:len(b)+len(d.mem)-d.n]
	return b, nil
}

// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
func (d *Digest) UnmarshalBinary(b []byte) error {
	if len(b) < len(magic) || string(b[:len(magic)]) != magic {
		return errors.New("xxhash: invalid hash state identifier")
	}
	if len(b) != marshaledSize {
		return errors.New("xxhash: invalid hash state size")
	}
	b = b[len(magic):]
	b, d.v1 = consumeUint64(b)
	b, d.v2 = consumeUint64(b)
	b, d.v3 = consumeUint64(b)
	b, d.v4 = consumeUint64(b)
	b, d.total = consumeUint64(b)
	copy(d.mem[:], b)
	// n is not serialized; it is recoverable as total mod block size.
	d.n = int(d.total % uint64(len(d.mem)))
	return nil
}

// appendUint64 appends x to b in little-endian order.
func appendUint64(b []byte, x uint64) []byte {
	var a [8]byte
	binary.LittleEndian.PutUint64(a[:], x)
	return append(b, a[:]...)
}

// consumeUint64 reads a little-endian uint64 and returns the remaining slice.
func consumeUint64(b []byte) ([]byte, uint64) {
	x := u64(b)
	return b[8:], x
}

// u64 and u32 are little-endian loads.
func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) }
func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) }

// round is the core XXH64 lane mixing step.
func round(acc, input uint64) uint64 {
	acc += input * prime2
	acc = rol31(acc)
	acc *= prime1
	return acc
}

// mergeRound folds one lane accumulator into the final hash.
func mergeRound(acc, val uint64) uint64 {
	val = round(0, val)
	acc ^= val
	acc = acc*prime1 + prime4
	return acc
}

func rol1(x uint64) uint64  { return bits.RotateLeft64(x, 1) }
func rol7(x uint64) uint64  { return bits.RotateLeft64(x, 7) }
func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) }
func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) }
func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) }
func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) }
func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) }
func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) }



================================================
FILE: internal/xxhash/xxhash_other.go
================================================
package xxhash

// Sum64 computes the 64-bit xxHash digest of b.
func Sum64(b []byte) uint64 {
	// A simpler version would be
	//   d := New()
	//   d.Write(b)
	//   return d.Sum64()
	// but this is faster, particularly for small inputs.

	n := len(b)
	var h uint64

	if n >= 32 {
		v1 := prime1v + prime2
		v2 := prime2
		v3 := uint64(0)
		v4 := -prime1v
		// Full 32-byte blocks. The three-index slices pin the slice capacity,
		// which helps the compiler with bounds-check elimination.
		for len(b) >= 32 {
			v1 = round(v1, u64(b[0:8:len(b)]))
			v2 = round(v2, u64(b[8:16:len(b)]))
			v3 = round(v3, u64(b[16:24:len(b)]))
			v4 = round(v4, u64(b[24:32:len(b)]))
			b = b[32:len(b):len(b)]
		}

		h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
		h = mergeRound(h, v1)
		h = mergeRound(h, v2)
		h = mergeRound(h, v3)
		h = mergeRound(h, v4)
	} else {
		h = prime5
	}

	h += uint64(n)

	// Fold in the tail: 8 bytes at a time, then 4, then byte-by-byte.
	i, end := 0, len(b)
	for ; i+8 <= end; i += 8 {
		k1 := round(0, u64(b[i:i+8:len(b)]))
		h ^= k1
		h = rol27(h)*prime1 + prime4
	}
	if i+4 <= end {
		h ^= uint64(u32(b[i:i+4:len(b)])) * prime1
		h = rol23(h)*prime2 + prime3
		i += 4
	}
	for ; i < end; i++ {
		h ^= uint64(b[i]) * prime5
		h = rol11(h) * prime1
	}

	// Final avalanche.
	h ^= h >> 33
	h *= prime2
	h ^= h >> 29
	h *= prime3
	h ^= h >> 32

	return h
}

// writeBlocks consumes as many full 32-byte blocks from b as possible,
// updating d's accumulators, and returns the number of bytes consumed
// (always a multiple of 32).
func writeBlocks(d *Digest, b []byte) int {
	v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
	n := len(b)
	for len(b) >= 32 {
		v1 = round(v1, u64(b[0:8:len(b)]))
		v2 = round(v2, u64(b[8:16:len(b)]))
		v3 = round(v3, u64(b[16:24:len(b)]))
		v4 = round(v4, u64(b[24:32:len(b)]))
		b = b[32:len(b):len(b)]
	}
	d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4
	return n - len(b)
}

================================================
FILE: internal_wasm/utils/utils.go
================================================
//go:build js && wasm

package wasm_utils

import (
	"runtime/debug"
	"strings"
	"syscall/js"

	"github.com/norunners/vert"
	astro "github.com/withastro/compiler/internal"
	"github.com/withastro/compiler/internal/handler"
)

// Await blocks until the given JS Promise settles. It returns (result, nil)
// when the promise resolves and (nil, err) when it rejects; the slices carry
// the arguments the JS callbacks received.
// See https://stackoverflow.com/questions/68426700/how-to-wait-a-js-async-function-from-golang-wasm
func Await(awaitable js.Value) ([]js.Value, []js.Value) {
	then := make(chan []js.Value)
	thenFunc := js.FuncOf(func(this js.Value, args []js.Value) interface{} {
		then <- args
		return nil
	})
	// defers are called LIFO!
	// This will `close` before `Release()`
	defer thenFunc.Release()
	defer close(then)

	catch := make(chan []js.Value)
	catchFunc := js.FuncOf(func(this js.Value, args []js.Value) interface{} {
		catch <- args
		return nil
	})
	// defers are called LIFO!
	// This will `close` before `Release()`
	defer catchFunc.Release()
	defer close(catch)

	awaitable.Call("then", thenFunc).Call("catch", catchFunc)

	// Whichever callback fires first delivers the outcome.
	select {
	case result := <-then:
		return result, nil
	case err := <-catch:
		return nil, err
	}
}

// GetAttrs converts a node's attributes into a plain JS object.
// Quoted attributes map to their string value; empty (valueless)
// attributes map to true. Other attribute types are skipped.
func GetAttrs(n *astro.Node) js.Value {
	attrs := js.Global().Get("Object").New()
	for _, attr := range n.Attr {
		switch attr.Type {
		case astro.QuotedAttribute:
			attrs.Set(attr.Key, attr.Val)
		case astro.EmptyAttribute:
			attrs.Set(attr.Key, true)
		}
	}
	return attrs
}

// JSError is marshaled (via vert) into a JS object shaped like an Error:
// { message, stack }.
type JSError struct {
	Message string `js:"message"`
	Stack   string `js:"stack"`
}

func (err *JSError) Value() js.Value {
	return vert.ValueOf(err).Value
}

// ErrorToJSError wraps a Go error (plus the current Go stack trace) as a
// JS error object.
// NOTE(review): the handler parameter h is currently unused here — confirm
// whether it is kept for interface symmetry with callers.
func ErrorToJSError(h *handler.Handler, err error) js.Value {
	stack := string(debug.Stack())
	message := strings.TrimSpace(err.Error())
	jsError := JSError{
		Message: message,
		Stack:   stack,
	}
	return jsError.Value()
}

================================================
FILE: lib/esbuild/LICENSE.md
================================================
MIT License

Copyright (c) 2020 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

================================================
FILE: lib/esbuild/ast/ast.go
================================================
package ast

import "github.com/withastro/compiler/lib/esbuild/logger"

// This file contains data structures that are used with the AST packages for
// both JavaScript and CSS. This helps the bundler treat both AST formats in
// a somewhat format-agnostic manner.

type ImportKind uint8

const (
	// An entry point provided by the user
	ImportEntryPoint ImportKind = iota

	// An ES6 import or re-export statement
	ImportStmt

	// A call to "require()"
	ImportRequire

	// An "import()" expression with a string argument
	ImportDynamic

	// A call to "require.resolve()"
	ImportRequireResolve

	// A CSS "@import" rule
	ImportAt

	// A CSS "@import" rule with import conditions
	ImportAtConditional

	// A CSS "url(...)" token
	ImportURL
)

// StringForMetafile returns the identifier written for this kind in the
// JSON metafile output.
func (kind ImportKind) StringForMetafile() string {
	switch kind {
	case ImportStmt:
		return "import-statement"
	case ImportRequire:
		return "require-call"
	case ImportDynamic:
		return "dynamic-import"
	case ImportRequireResolve:
		return "require-resolve"
	case ImportAt, ImportAtConditional:
		return "import-rule"
	case ImportURL:
		return "url-token"
	case ImportEntryPoint:
		return "entry-point"
	default:
		panic("Internal error")
	}
}

func (kind ImportKind) IsFromCSS() bool {
	return kind == ImportAt || kind == ImportURL
}

type ImportRecordFlags uint16

const (
	// Sometimes the parser creates an import record and decides it isn't needed.
	// For example, TypeScript code may have import statements that later turn
	// out to be type-only imports after analyzing the whole file.
	IsUnused ImportRecordFlags = 1 << iota

	// If this is true, the import contains syntax like "* as ns". This is used
	// to determine whether modules that have no exports need to be wrapped in a
	// CommonJS wrapper or not.
	ContainsImportStar

	// If this is true, the import contains an import for the alias "default",
	// either via the "import x from" or "import {default as x} from" syntax.
	ContainsDefaultAlias

	// If this is true, the import contains an import for the alias "__esModule",
	// via the "import {__esModule} from" syntax.
	ContainsESModuleAlias

	// If true, this "export * from 'path'" statement is evaluated at run-time by
	// calling the "__reExport()" helper function
	CallsRunTimeReExportFn

	// Tell the printer to wrap this call to "require()" in "__toESM(...)"
	WrapWithToESM

	// Tell the printer to wrap this ESM exports object in "__toCJS(...)"
	WrapWithToCJS

	// Tell the printer to use the runtime "__require()" instead of "require()"
	CallRuntimeRequire

	// True for the following cases:
	//
	//   try { require('x') } catch { handle }
	//   try { await import('x') } catch { handle }
	//   try { require.resolve('x') } catch { handle }
	//   import('x').catch(handle)
	//   import('x').then(_, handle)
	//
	// In these cases we shouldn't generate an error if the path could not be
	// resolved.
	HandlesImportErrors

	// If true, this was originally written as a bare "import 'file'" statement
	WasOriginallyBareImport

	// If true, this import can be removed if it's unused
	IsExternalWithoutSideEffects
)

func (flags ImportRecordFlags) Has(flag ImportRecordFlags) bool {
	return (flags & flag) != 0
}

type ImportRecord struct {
	Assertions *[]AssertEntry
	Path       logger.Path
	Range      logger.Range

	// If the "HandlesImportErrors" flag is present, then this is the location
	// of the error handler. This is used for error reporting.
	ErrorHandlerLoc logger.Loc

	// The resolved source index for an internal import (within the bundle) or
	// nil for an external import (not included in the bundle)
	SourceIndex Index32

	Flags ImportRecordFlags
	Kind  ImportKind
}

type AssertEntry struct {
	Key             []uint16 // An identifier or a string
	Value           []uint16 // Always a string
	KeyLoc          logger.Loc
	ValueLoc        logger.Loc
	PreferQuotedKey bool
}

// This stores a 32-bit index where the zero value is an invalid index. This is
// a better alternative to storing the index as a pointer since that has the
// same properties but takes up more space and costs an extra pointer traversal.
type Index32 struct {
	flippedBits uint32
}

func MakeIndex32(index uint32) Index32 {
	return Index32{flippedBits: ^index}
}

func (i Index32) IsValid() bool {
	return i.flippedBits != 0
}

func (i Index32) GetIndex() uint32 {
	return ^i.flippedBits
}

================================================
FILE: lib/esbuild/compat/compat.go
================================================
package compat

// v is a compact semantic version (major.minor.patch).
type v struct {
	major uint16
	minor uint8
	patch uint8
}

// Returns <0 if "a < b"
// Returns 0 if "a == b"
// Returns >0 if "a > b"
//
// Components missing from b are simply not subtracted, i.e. treated as zero.
func compareVersions(a v, b []int) int {
	diff := int(a.major)
	if len(b) > 0 {
		diff -= b[0]
	}
	if diff == 0 {
		diff = int(a.minor)
		if len(b) > 1 {
			diff -= b[1]
		}
	}
	if diff == 0 {
		diff = int(a.patch)
		if len(b) > 2 {
			diff -= b[2]
		}
	}
	return diff
}

// The start is inclusive and the end is exclusive
type versionRange struct {
	start v
	end   v // Use 0.0.0 for "no end"
}

// isVersionSupported reports whether version falls inside any of the
// given [start, end) ranges.
func isVersionSupported(ranges []versionRange, version []int) bool {
	for _, r := range ranges {
		if compareVersions(r.start, version) <= 0 && (r.end == (v{}) || compareVersions(r.end, version) > 0) {
			return true
		}
	}
	return false
}

================================================
FILE: lib/esbuild/compat/css_table.go
================================================
package compat

type CSSFeature uint32

const (
	HexRGBA CSSFeature = 1 << iota
	RebeccaPurple

	// This feature includes all of
the following: // - Allow floats in rgb() and rgba() // - hsl() can accept alpha values // - rgb() can accept alpha values // - Space-separated functional color notations Modern_RGB_HSL InsetProperty Nesting ) func (features CSSFeature) Has(feature CSSFeature) bool { return (features & feature) != 0 } var cssTable = map[CSSFeature]map[Engine][]versionRange{ // Data from: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value HexRGBA: { Chrome: {{start: v{62, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, Firefox: {{start: v{49, 0, 0}}}, IOS: {{start: v{9, 3, 0}}}, Safari: {{start: v{9, 1, 0}}}, }, RebeccaPurple: { Chrome: {{start: v{38, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, Firefox: {{start: v{33, 0, 0}}}, IOS: {{start: v{8, 0, 0}}}, Safari: {{start: v{9, 0, 0}}}, }, Modern_RGB_HSL: { Chrome: {{start: v{66, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, Firefox: {{start: v{52, 0, 0}}}, IOS: {{start: v{12, 2, 0}}}, Safari: {{start: v{12, 1, 0}}}, }, // Data from: https://developer.mozilla.org/en-US/docs/Web/CSS/inset InsetProperty: { Chrome: {{start: v{87, 0, 0}}}, Edge: {{start: v{87, 0, 0}}}, Firefox: {{start: v{66, 0, 0}}}, IOS: {{start: v{14, 5, 0}}}, Safari: {{start: v{14, 1, 0}}}, }, // This isn't supported anywhere right now: https://caniuse.com/css-nesting Nesting: {}, } // Return all features that are not available in at least one environment func UnsupportedCSSFeatures(constraints map[Engine][]int) (unsupported CSSFeature) { for feature, engines := range cssTable { for engine, version := range constraints { if engine == ES || engine == Node { // Specifying "--target=es2020" shouldn't affect CSS continue } if versionRanges, ok := engines[engine]; !ok || !isVersionSupported(versionRanges, version) { unsupported |= feature } } } return } ================================================ FILE: lib/esbuild/compat/js_table.go ================================================ // This file was automatically generated by "compat-table.js" package compat type Engine uint8 
const ( Chrome Engine = iota Edge ES Firefox IE IOS Node Opera Safari ) func (e Engine) String() string { switch e { case Chrome: return "chrome" case Edge: return "edge" case ES: return "es" case Firefox: return "firefox" case IE: return "ie" case IOS: return "ios" case Node: return "node" case Opera: return "opera" case Safari: return "safari" } return "" } type JSFeature uint64 const ( ArbitraryModuleNamespaceNames JSFeature = 1 << iota ArraySpread Arrow AsyncAwait AsyncGenerator BigInt Class ClassField ClassPrivateAccessor ClassPrivateBrandCheck ClassPrivateField ClassPrivateMethod ClassPrivateStaticAccessor ClassPrivateStaticField ClassPrivateStaticMethod ClassStaticBlocks ClassStaticField Const DefaultArgument Destructuring DynamicImport ExponentOperator ExportStarAs ForAwait ForOf Generator Hashbang ImportAssertions ImportMeta Let LogicalAssignment NestedRestBinding NewTarget NodeColonPrefixImport NodeColonPrefixRequire NullishCoalescing ObjectAccessors ObjectExtensions ObjectRestSpread OptionalCatchBinding OptionalChain RestArgument TemplateLiteral TopLevelAwait TypeofExoticObjectIsObject UnicodeEscapes ) func (features JSFeature) Has(feature JSFeature) bool { return (features & feature) != 0 } var jsTable = map[JSFeature]map[Engine][]versionRange{ ArbitraryModuleNamespaceNames: { Chrome: {{start: v{90, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{87, 0, 0}}}, Node: {{start: v{16, 0, 0}}}, }, ArraySpread: { Chrome: {{start: v{46, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{36, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{5, 0, 0}}}, Opera: {{start: v{33, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, Arrow: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{45, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, AsyncAwait: { Chrome: {{start: v{55, 0, 
0}}}, Edge: {{start: v{15, 0, 0}}}, ES: {{start: v{2017, 0, 0}}}, Firefox: {{start: v{52, 0, 0}}}, IOS: {{start: v{11, 0, 0}}}, Node: {{start: v{7, 6, 0}}}, Opera: {{start: v{42, 0, 0}}}, Safari: {{start: v{11, 0, 0}}}, }, AsyncGenerator: { Chrome: {{start: v{63, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2018, 0, 0}}}, Firefox: {{start: v{57, 0, 0}}}, IOS: {{start: v{12, 0, 0}}}, Node: {{start: v{10, 0, 0}}}, Opera: {{start: v{50, 0, 0}}}, Safari: {{start: v{12, 0, 0}}}, }, BigInt: { Chrome: {{start: v{67, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{68, 0, 0}}}, IOS: {{start: v{14, 0, 0}}}, Node: {{start: v{10, 4, 0}}}, Opera: {{start: v{54, 0, 0}}}, Safari: {{start: v{14, 0, 0}}}, }, Class: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{45, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, ClassField: { Chrome: {{start: v{73, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{69, 0, 0}}}, IOS: {{start: v{14, 0, 0}}}, Node: {{start: v{12, 0, 0}}}, Opera: {{start: v{60, 0, 0}}}, Safari: {{start: v{14, 0, 0}}}, }, ClassPrivateAccessor: { Chrome: {{start: v{84, 0, 0}}}, Edge: {{start: v{84, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{14, 6, 0}}}, Opera: {{start: v{70, 0, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, ClassPrivateBrandCheck: { Chrome: {{start: v{91, 0, 0}}}, Edge: {{start: v{91, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{16, 9, 0}}}, Opera: {{start: v{77, 0, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, ClassPrivateField: { Chrome: {{start: v{84, 0, 0}}}, Edge: {{start: v{84, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, 
Node: {{start: v{14, 6, 0}}}, Opera: {{start: v{70, 0, 0}}}, Safari: {{start: v{14, 1, 0}}}, }, ClassPrivateMethod: { Chrome: {{start: v{84, 0, 0}}}, Edge: {{start: v{84, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{14, 6, 0}}}, Opera: {{start: v{70, 0, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, ClassPrivateStaticAccessor: { Chrome: {{start: v{84, 0, 0}}}, Edge: {{start: v{84, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{14, 6, 0}}}, Opera: {{start: v{70, 0, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, ClassPrivateStaticField: { Chrome: {{start: v{74, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{12, 0, 0}}}, Opera: {{start: v{62, 0, 0}}}, Safari: {{start: v{14, 1, 0}}}, }, ClassPrivateStaticMethod: { Chrome: {{start: v{84, 0, 0}}}, Edge: {{start: v{84, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{90, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{14, 6, 0}}}, Opera: {{start: v{70, 0, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, ClassStaticBlocks: { Chrome: {{start: v{91, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Node: {{start: v{16, 11, 0}}}, }, ClassStaticField: { Chrome: {{start: v{73, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{75, 0, 0}}}, IOS: {{start: v{15, 0, 0}}}, Node: {{start: v{12, 0, 0}}}, Opera: {{start: v{60, 0, 0}}}, Safari: {{start: v{14, 1, 0}}}, }, Const: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{51, 0, 0}}}, IE: {{start: v{11, 0, 0}}}, IOS: {{start: v{11, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{11, 0, 0}}}, }, DefaultArgument: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: 
{{start: v{53, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, Destructuring: { Chrome: {{start: v{51, 0, 0}}}, Edge: {{start: v{18, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{53, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 5, 0}}}, Opera: {{start: v{38, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, DynamicImport: { Chrome: {{start: v{63, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{67, 0, 0}}}, IOS: {{start: v{11, 0, 0}}}, Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{13, 2, 0}}}, Safari: {{start: v{11, 1, 0}}}, }, ExponentOperator: { Chrome: {{start: v{52, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2016, 0, 0}}}, Firefox: {{start: v{52, 0, 0}}}, IOS: {{start: v{10, 3, 0}}}, Node: {{start: v{7, 0, 0}}}, Opera: {{start: v{39, 0, 0}}}, Safari: {{start: v{10, 1, 0}}}, }, ExportStarAs: { Chrome: {{start: v{72, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{80, 0, 0}}}, Node: {{start: v{12, 0, 0}}}, }, ForAwait: { Chrome: {{start: v{63, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2018, 0, 0}}}, Firefox: {{start: v{57, 0, 0}}}, IOS: {{start: v{12, 0, 0}}}, Node: {{start: v{10, 0, 0}}}, Opera: {{start: v{50, 0, 0}}}, Safari: {{start: v{12, 0, 0}}}, }, ForOf: { Chrome: {{start: v{51, 0, 0}}}, Edge: {{start: v{15, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{53, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 5, 0}}}, Opera: {{start: v{38, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, Generator: { Chrome: {{start: v{50, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{53, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{37, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, Hashbang: { Chrome: {{start: v{74, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, Firefox: 
{{start: v{67, 0, 0}}}, IOS: {{start: v{13, 4, 0}}}, Node: {{start: v{12, 0, 0}}}, Opera: {{start: v{62, 0, 0}}}, Safari: {{start: v{13, 1, 0}}}, }, ImportAssertions: { Chrome: {{start: v{91, 0, 0}}}, Node: {{start: v{16, 14, 0}}}, }, ImportMeta: { Chrome: {{start: v{64, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{62, 0, 0}}}, IOS: {{start: v{12, 0, 0}}}, Node: {{start: v{10, 4, 0}}}, Safari: {{start: v{11, 1, 0}}}, }, Let: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{51, 0, 0}}}, IE: {{start: v{11, 0, 0}}}, IOS: {{start: v{11, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{11, 0, 0}}}, }, LogicalAssignment: { Chrome: {{start: v{85, 0, 0}}}, Edge: {{start: v{85, 0, 0}}}, ES: {{start: v{2021, 0, 0}}}, Firefox: {{start: v{79, 0, 0}}}, IOS: {{start: v{14, 0, 0}}}, Node: {{start: v{15, 0, 0}}}, Opera: {{start: v{71, 0, 0}}}, Safari: {{start: v{14, 0, 0}}}, }, NestedRestBinding: { Chrome: {{start: v{49, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2016, 0, 0}}}, Firefox: {{start: v{47, 0, 0}}}, IOS: {{start: v{10, 3, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{36, 0, 0}}}, Safari: {{start: v{10, 1, 0}}}, }, NewTarget: { Chrome: {{start: v{46, 0, 0}}}, Edge: {{start: v{14, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{41, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{5, 0, 0}}}, Opera: {{start: v{33, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, NodeColonPrefixImport: { Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{14, 13, 1}}}, }, NodeColonPrefixRequire: { Node: {{start: v{14, 18, 0}, end: v{15, 0, 0}}, {start: v{16, 0, 0}}}, }, NullishCoalescing: { Chrome: {{start: v{80, 0, 0}}}, Edge: {{start: v{80, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{72, 0, 0}}}, IOS: {{start: v{13, 4, 0}}}, Node: {{start: v{14, 0, 0}}}, Opera: {{start: v{67, 0, 0}}}, Safari: 
{{start: v{13, 1, 0}}}, }, ObjectAccessors: { Chrome: {{start: v{5, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, ES: {{start: v{5, 0, 0}}}, Firefox: {{start: v{2, 0, 0}}}, IE: {{start: v{9, 0, 0}}}, IOS: {{start: v{6, 0, 0}}}, Node: {{start: v{0, 10, 0}}}, Opera: {{start: v{10, 10, 0}}}, Safari: {{start: v{3, 1, 0}}}, }, ObjectExtensions: { Chrome: {{start: v{44, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{34, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{4, 0, 0}}}, Opera: {{start: v{31, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, ObjectRestSpread: { ES: {{start: v{2018, 0, 0}}}, Firefox: {{start: v{55, 0, 0}}}, IOS: {{start: v{11, 3, 0}}}, Opera: {{start: v{47, 0, 0}}}, Safari: {{start: v{11, 1, 0}}}, }, OptionalCatchBinding: { Chrome: {{start: v{66, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, ES: {{start: v{2019, 0, 0}}}, Firefox: {{start: v{58, 0, 0}}}, IOS: {{start: v{11, 3, 0}}}, Node: {{start: v{10, 0, 0}}}, Opera: {{start: v{53, 0, 0}}}, Safari: {{start: v{11, 1, 0}}}, }, OptionalChain: { Chrome: {{start: v{91, 0, 0}}}, Edge: {{start: v{91, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{74, 0, 0}}}, IOS: {{start: v{13, 4, 0}}}, Node: {{start: v{16, 9, 0}}}, Opera: {{start: v{77, 0, 0}}}, Safari: {{start: v{13, 1, 0}}}, }, RestArgument: { Chrome: {{start: v{47, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{43, 0, 0}}}, IOS: {{start: v{10, 0, 0}}}, Node: {{start: v{6, 0, 0}}}, Opera: {{start: v{34, 0, 0}}}, Safari: {{start: v{10, 0, 0}}}, }, TemplateLiteral: { Chrome: {{start: v{41, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{34, 0, 0}}}, IOS: {{start: v{9, 0, 0}}}, Node: {{start: v{4, 0, 0}}}, Opera: {{start: v{28, 0, 0}}}, Safari: {{start: v{9, 0, 0}}}, }, TopLevelAwait: { Chrome: {{start: v{89, 0, 0}}}, Edge: {{start: v{89, 0, 0}}}, ES: {{start: v{2022, 0, 0}}}, Firefox: {{start: v{89, 0, 0}}}, Node: {{start: v{14, 
8, 0}}}, Safari: {{start: v{15, 0, 0}}}, }, TypeofExoticObjectIsObject: { Chrome: {{start: v{0, 0, 0}}}, Edge: {{start: v{0, 0, 0}}}, ES: {{start: v{2020, 0, 0}}}, Firefox: {{start: v{0, 0, 0}}}, IOS: {{start: v{0, 0, 0}}}, Node: {{start: v{0, 0, 0}}}, Opera: {{start: v{0, 0, 0}}}, Safari: {{start: v{0, 0, 0}}}, }, UnicodeEscapes: { Chrome: {{start: v{44, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, ES: {{start: v{2015, 0, 0}}}, Firefox: {{start: v{53, 0, 0}}}, IOS: {{start: v{9, 0, 0}}}, Node: {{start: v{4, 0, 0}}}, Opera: {{start: v{31, 0, 0}}}, Safari: {{start: v{9, 0, 0}}}, }, } // Return all features that are not available in at least one environment func UnsupportedJSFeatures(constraints map[Engine][]int) (unsupported JSFeature) { for feature, engines := range jsTable { for engine, version := range constraints { if versionRanges, ok := engines[engine]; !ok || !isVersionSupported(versionRanges, version) { unsupported |= feature } } } return } ================================================ FILE: lib/esbuild/config/config.go ================================================ package config import ( "fmt" "regexp" "strings" "sync" "github.com/withastro/compiler/lib/esbuild/ast" "github.com/withastro/compiler/lib/esbuild/compat" "github.com/withastro/compiler/lib/esbuild/logger" ) type JSXOptions struct { Factory JSXExpr Fragment JSXExpr Parse bool Preserve bool } type JSXExpr struct { Parts []string } type TSOptions struct { Parse bool NoAmbiguousLessThan bool } type Platform uint8 const ( PlatformBrowser Platform = iota PlatformNode PlatformNeutral ) type SourceMap uint8 const ( SourceMapNone SourceMap = iota SourceMapInline SourceMapLinkedWithComment SourceMapExternalWithoutComment SourceMapInlineAndExternal ) type LegalComments uint8 const ( LegalCommentsInline LegalComments = iota LegalCommentsNone LegalCommentsEndOfFile LegalCommentsLinkedWithComment LegalCommentsExternalWithoutComment ) func (lc LegalComments) HasExternalFile() bool { return lc == 
LegalCommentsLinkedWithComment || lc == LegalCommentsExternalWithoutComment
}

// Loader identifies how the contents of an input file are interpreted.
type Loader uint8

const (
	LoaderNone Loader = iota
	LoaderJS
	LoaderJSX
	LoaderTS
	LoaderTSNoAmbiguousLessThan // Used with ".mts" and ".cts"
	LoaderTSX
	LoaderJSON
	LoaderText
	LoaderBase64
	LoaderDataURL
	LoaderFile
	LoaderBinary
	LoaderCSS
	LoaderDefault
)

// IsTypeScript reports whether this loader parses TypeScript syntax.
func (loader Loader) IsTypeScript() bool {
	switch loader {
	case LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX:
		return true
	default:
		return false
	}
}

// CanHaveSourceMap reports whether output from this loader may carry a
// source map (the JS-family loaders and CSS only).
func (loader Loader) CanHaveSourceMap() bool {
	switch loader {
	case LoaderJS, LoaderJSX, LoaderTS, LoaderTSNoAmbiguousLessThan, LoaderTSX, LoaderCSS:
		return true
	default:
		return false
	}
}

// Format selects the module format of the generated output.
type Format uint8

const (
	// This is used when not bundling. It means to preserve whatever form the
	// import or export was originally in. ES6 syntax stays ES6 syntax and
	// CommonJS syntax stays CommonJS syntax.
	FormatPreserve Format = iota

	// IIFE stands for immediately-invoked function expression. That looks like
	// this:
	//
	//   (() => {
	//     ... bundled code ...
	//   })();
	//
	// If the optional GlobalName is configured, then we'll write out this:
	//
	//   let globalName = (() => {
	//     ... bundled code ...
	//     return exports;
	//   })();
	//
	FormatIIFE

	// The CommonJS format looks like this:
	//
	//   ... bundled code ...
	//   module.exports = exports;
	//
	FormatCommonJS

	// The ES module format looks like this:
	//
	//   ... bundled code ...
	//   export {...};
	//
	FormatESModule
)

// KeepES6ImportExportSyntax reports whether ES6 import/export statements
// survive unchanged in output of this format.
func (f Format) KeepES6ImportExportSyntax() bool {
	return f == FormatPreserve || f == FormatESModule
}

// String returns the user-facing name of the format, or "" for
// FormatPreserve (which has no CLI name here).
func (f Format) String() string {
	switch f {
	case FormatIIFE:
		return "iife"
	case FormatCommonJS:
		return "cjs"
	case FormatESModule:
		return "esm"
	}
	return ""
}

// StdinInfo describes source code supplied via stdin instead of a file.
type StdinInfo struct {
	Contents      string
	SourceFile    string
	AbsResolveDir string
	Loader        Loader
}

// WildcardPattern matches any string of the form Prefix + anything + Suffix.
type WildcardPattern struct {
	Prefix string
	Suffix string
}

// ExternalMatchers holds the exact names and wildcard patterns used to
// mark import paths as external.
type ExternalMatchers struct {
	Exact    map[string]bool
	Patterns []WildcardPattern
}

// HasMatchers reports whether any exact or wildcard matcher is configured.
func (matchers ExternalMatchers) HasMatchers() bool {
	return len(matchers.Exact) > 0 || len(matchers.Patterns) > 0
}

// ExternalSettings splits external matchers by whether they apply before
// or after path resolution.
type ExternalSettings struct {
	PreResolve  ExternalMatchers
	PostResolve ExternalMatchers
}

type Mode uint8

const (
	ModePassThrough Mode = iota
	ModeConvertFormat
	ModeBundle
)

// MaybeBool is a tri-state boolean: unspecified, true, or false.
type MaybeBool uint8

const (
	Unspecified MaybeBool = iota
	True
	False
)

// Options is the full build configuration threaded through the bundler.
type Options struct {
	TSTarget     *TSTarget
	MangleProps  *regexp.Regexp
	ReserveProps *regexp.Regexp

	// When mangling property names, call this function with a callback and do
	// the property name mangling inside the callback. The callback takes an
	// argument which is the mangle cache map to mutate. These callbacks are
	// serialized so mutating the map does not require extra synchronization.
	//
	// This is a callback for determinism reasons. We may be building multiple
	// entry points in parallel that are supposed to share a single cache. We
	// don't want the order that each entry point mangles properties in to cause
	// the output to change, so we serialize the property mangling over all entry
	// points in entry point order. However, we still want to link everything in
	// parallel so only property mangling is serialized, which is implemented by
	// this function blocking until the previous entry point's property mangling
	// has finished.
	ExclusiveMangleCacheUpdate func(cb func(mangleCache map[string]interface{}))

	// This is the original information that was used to generate the
	// unsupported feature sets above. It's used for error messages.
	OriginalTargetEnv string

	ExtensionOrder   []string
	MainFields       []string
	Conditions       []string
	AbsNodePaths     []string // The "NODE_PATH" variable from Node.js
	ExternalSettings ExternalSettings

	AbsOutputFile      string
	AbsOutputDir       string
	AbsOutputBase      string
	OutputExtensionJS  string
	OutputExtensionCSS string
	GlobalName         []string
	TsConfigOverride   string
	ExtensionToLoader  map[string]Loader

	PublicPath      string
	InjectAbsPaths  []string
	InjectedDefines []InjectedDefine
	InjectedFiles   []InjectedFile

	JSBanner  string
	JSFooter  string
	CSSBanner string
	CSSFooter string

	EntryPathTemplate []PathTemplate
	ChunkPathTemplate []PathTemplate
	AssetPathTemplate []PathTemplate

	Plugins []Plugin

	SourceRoot             string
	Stdin                  *StdinInfo
	JSX                    JSXOptions
	UnsupportedJSFeatures  compat.JSFeature
	UnsupportedCSSFeatures compat.CSSFeature
	TS                     TSOptions
	Mode                   Mode
	PreserveSymlinks       bool
	MinifyWhitespace       bool
	MinifyIdentifiers      bool
	MinifySyntax           bool
	ProfilerNames          bool
	CodeSplitting          bool
	WatchMode              bool
	AllowOverwrite         bool
	LegalComments          LegalComments

	// If true, make sure to generate a single file that can be written to stdout
	WriteToStdout bool

	OmitRuntimeForTests     bool
	UnusedImportsTS         UnusedImportsTS
	UseDefineForClassFields MaybeBool
	ASCIIOnly               bool
	KeepNames               bool
	IgnoreDCEAnnotations    bool
	TreeShaking             bool
	DropDebugger            bool
	MangleQuoted            bool
	Platform                Platform
	TargetFromAPI           TargetFromAPI
	OutputFormat            Format
	NeedsMetafile           bool
	SourceMap               SourceMap
	ExcludeSourcesContent   bool
}

// TargetFromAPI records whether the API caller configured a transpilation
// target, which decides whether "tsconfig.json" settings are honored.
type TargetFromAPI uint8

const (
	// In this state, the "target" field in "tsconfig.json" is respected
	TargetWasUnconfigured TargetFromAPI = iota

	// In this state, the "target" field in "tsconfig.json" is overridden
	TargetWasConfigured

	// In this state, "useDefineForClassFields" is true unless overridden
	TargetWasConfiguredIncludingESNext
)

type
UnusedImportsTS uint8

const (
	// "import { unused } from 'foo'" => "" (TypeScript's default behavior)
	UnusedImportsRemoveStmt UnusedImportsTS = iota

	// "import { unused } from 'foo'" => "import 'foo'" ("importsNotUsedAsValues" != "remove")
	UnusedImportsKeepStmtRemoveValues

	// "import { unused } from 'foo'" => "import { unused } from 'foo'" ("preserveValueImports" == true)
	UnusedImportsKeepValues
)

// UnusedImportsFromTsconfigValues maps the two relevant "tsconfig.json"
// flags onto a single UnusedImportsTS mode. "preserveValueImports" takes
// priority over "importsNotUsedAsValues".
func UnusedImportsFromTsconfigValues(preserveImportsNotUsedAsValues bool, preserveValueImports bool) UnusedImportsTS {
	if preserveValueImports {
		return UnusedImportsKeepValues
	}
	if preserveImportsNotUsedAsValues {
		return UnusedImportsKeepStmtRemoveValues
	}
	return UnusedImportsRemoveStmt
}

// TSTarget carries the "target" value from "tsconfig.json" along with its
// source location and the JS features that target rules out.
type TSTarget struct {
	Target                string
	Source                logger.Source
	Range                 logger.Range
	UnsupportedJSFeatures compat.JSFeature
}

type PathPlaceholder uint8

const (
	NoPlaceholder PathPlaceholder = iota

	// The relative path from the original parent directory to the configured
	// "outbase" directory, or to the lowest common ancestor directory
	DirPlaceholder

	// The original name of the file, or the manual chunk name, or the name of
	// the type of output file ("entry" or "chunk" or "asset")
	NamePlaceholder

	// A hash of the contents of this file, and the contents and output paths of
	// all dependencies (except for their hash placeholders)
	HashPlaceholder

	// The original extension of the file, or the name of the output file
	// (e.g. "css", "svg", "png")
	ExtPlaceholder
)

// PathTemplate is one segment of an output path template: a literal Data
// prefix followed by an optional placeholder.
type PathTemplate struct {
	Data        string
	Placeholder PathPlaceholder
}

// PathPlaceholders holds the substitution value for each placeholder kind;
// a nil entry means "leave that placeholder unsubstituted".
type PathPlaceholders struct {
	Dir  *string
	Name *string
	Hash *string
	Ext  *string
}

// Get returns the substitution value for the given placeholder, or nil.
func (placeholders PathPlaceholders) Get(placeholder PathPlaceholder) *string {
	switch placeholder {
	case DirPlaceholder:
		return placeholders.Dir
	case NamePlaceholder:
		return placeholders.Name
	case HashPlaceholder:
		return placeholders.Hash
	case ExtPlaceholder:
		return placeholders.Ext
	}
	return nil
}

// TemplateToString renders a template back into its textual
// "[dir]/[name]-[hash][ext]" form.
func TemplateToString(template []PathTemplate) string {
	if len(template) == 1 && template[0].Placeholder == NoPlaceholder {
		// Avoid allocations in this case
		return template[0].Data
	}
	sb := strings.Builder{}
	for _, part := range template {
		sb.WriteString(part.Data)
		switch part.Placeholder {
		case DirPlaceholder:
			sb.WriteString("[dir]")
		case NamePlaceholder:
			sb.WriteString("[name]")
		case HashPlaceholder:
			sb.WriteString("[hash]")
		case ExtPlaceholder:
			sb.WriteString("[ext]")
		}
	}
	return sb.String()
}

// HasPlaceholder reports whether the template contains the given placeholder.
func HasPlaceholder(template []PathTemplate, placeholder PathPlaceholder) bool {
	for _, part := range template {
		if part.Placeholder == placeholder {
			return true
		}
	}
	return false
}

// SubstituteTemplate replaces every placeholder that has a value in
// "placeholders" with that value, merging adjacent literal parts. The
// input slice is returned unchanged when no substitution would occur.
func SubstituteTemplate(template []PathTemplate, placeholders PathPlaceholders) []PathTemplate {
	// Don't allocate if no substitution is possible and the template is already minimal
	shouldSubstitute := false
	for i, part := range template {
		if placeholders.Get(part.Placeholder) != nil || (part.Placeholder == NoPlaceholder && i+1 < len(template)) {
			shouldSubstitute = true
			break
		}
	}
	if !shouldSubstitute {
		return template
	}

	// Otherwise, substitute and merge as appropriate
	result := make([]PathTemplate, 0, len(template))
	for _, part := range template {
		if sub := placeholders.Get(part.Placeholder); sub != nil {
			part.Data += *sub
			part.Placeholder = NoPlaceholder
		}
		if last := len(result) - 1; last >= 0 && result[last].Placeholder == NoPlaceholder {
			last := &result[last]
			last.Data += part.Data
			last.Placeholder = part.Placeholder
		} else {
			result = append(result, part)
		}
	}
	return result
}

// ShouldCallRuntimeRequire reports whether "require" should go through the
// bundled runtime shim (only when bundling to a non-CommonJS format).
func ShouldCallRuntimeRequire(mode Mode, outputFormat Format) bool {
	return mode == ModeBundle && outputFormat != FormatCommonJS
}

type InjectedDefine struct {
	Name   string
	Source logger.Source
}

type InjectedFile struct {
	Exports    []InjectableExport
	DefineName string
	Source     logger.Source
}

type InjectableExport struct {
	Alias string
	Loc   logger.Loc
}

// filterMutex guards filterCache, which memoizes compiled plugin filters.
var filterMutex sync.Mutex
var filterCache map[string]*regexp.Regexp

// compileFilter compiles a plugin filter regexp, caching results behind
// filterMutex. It returns nil for the empty string or an invalid pattern.
func compileFilter(filter string) (result *regexp.Regexp) {
	if filter == "" {
		// Must provide a filter
		return nil
	}
	ok := false

	// Cache hit?
	(func() {
		filterMutex.Lock()
		defer filterMutex.Unlock()
		if filterCache != nil {
			result, ok = filterCache[filter]
		}
	})()
	if ok {
		return
	}

	// Cache miss
	result, err := regexp.Compile(filter)
	if err != nil {
		return nil
	}

	// Cache for next time
	filterMutex.Lock()
	defer filterMutex.Unlock()
	if filterCache == nil {
		filterCache = make(map[string]*regexp.Regexp)
	}
	filterCache[filter] = result
	return
}

// CompileFilterForPlugin is like compileFilter but produces descriptive
// errors for missing or invalid plugin filters.
func CompileFilterForPlugin(pluginName string, kind string, filter string) (*regexp.Regexp, error) {
	if filter == "" {
		return nil, fmt.Errorf("[%s] %q is missing a filter", pluginName, kind)
	}

	result := compileFilter(filter)
	if result == nil {
		return nil, fmt.Errorf("[%s] %q filter is not a valid Go regular expression: %q", pluginName, kind, filter)
	}

	return result, nil
}

// PluginAppliesToPath reports whether a plugin callback registered with the
// given filter and namespace should run for this path. An empty namespace
// matches paths in every namespace.
func PluginAppliesToPath(path logger.Path, filter *regexp.Regexp, namespace string) bool {
	return (namespace == "" || path.Namespace == namespace) && filter.MatchString(path.Text)
}

////////////////////////////////////////////////////////////////////////////////
// Plugin API

type Plugin struct {
	Name      string
	OnStart   []OnStart
	OnResolve []OnResolve
	OnLoad    []OnLoad
}

type OnStart struct {
	Callback func() OnStartResult
	Name     string
}

type OnStartResult struct {
	ThrownError error
	Msgs        []logger.Msg
}

type OnResolve struct {
	Filter   *regexp.Regexp
	Callback func(OnResolveArgs)
OnResolveResult Name string Namespace string } type OnResolveArgs struct { Path string ResolveDir string PluginData interface{} Importer logger.Path Kind ast.ImportKind } type OnResolveResult struct { PluginName string Msgs []logger.Msg ThrownError error AbsWatchFiles []string AbsWatchDirs []string PluginData interface{} Path logger.Path External bool IsSideEffectFree bool } type OnLoad struct { Filter *regexp.Regexp Callback func(OnLoadArgs) OnLoadResult Name string Namespace string } type OnLoadArgs struct { PluginData interface{} Path logger.Path } type OnLoadResult struct { PluginName string Contents *string AbsResolveDir string PluginData interface{} Msgs []logger.Msg ThrownError error AbsWatchFiles []string AbsWatchDirs []string Loader Loader } ================================================ FILE: lib/esbuild/config/globals.go ================================================ package config import ( "sync" ) var processedGlobalsMutex sync.Mutex // If something is in this list, then a direct identifier expression or property // access chain matching this will be assumed to have no side effects and will // be removed. // // This also means code is allowed to be reordered past things in this list. For // example, if "console.log" is in this list, permitting reordering allows for // "if (a) console.log(b); else console.log(c)" to be reordered and transformed // into "console.log(a ? b : c)". Notice that "a" and "console.log" are in a // different order, which can only happen if evaluating the "console.log" // property access can be assumed to not change the value of "a". // // Note that membership in this list says nothing about whether calling any of // these functions has any side effects. It only says something about // referencing these function without calling them. var knownGlobals = [][]string{ // These global identifiers should exist in all JavaScript environments. 
This // deliberately omits "NaN", "Infinity", and "undefined" because these are // treated as automatically-inlined constants instead of identifiers. {"Array"}, {"Boolean"}, {"Function"}, {"Math"}, {"Number"}, {"Object"}, {"RegExp"}, {"String"}, // Object: Static methods // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Static_methods {"Object", "assign"}, {"Object", "create"}, {"Object", "defineProperties"}, {"Object", "defineProperty"}, {"Object", "entries"}, {"Object", "freeze"}, {"Object", "fromEntries"}, {"Object", "getOwnPropertyDescriptor"}, {"Object", "getOwnPropertyDescriptors"}, {"Object", "getOwnPropertyNames"}, {"Object", "getOwnPropertySymbols"}, {"Object", "getPrototypeOf"}, {"Object", "is"}, {"Object", "isExtensible"}, {"Object", "isFrozen"}, {"Object", "isSealed"}, {"Object", "keys"}, {"Object", "preventExtensions"}, {"Object", "seal"}, {"Object", "setPrototypeOf"}, {"Object", "values"}, // Object: Instance methods // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object#Instance_methods {"Object", "prototype", "__defineGetter__"}, {"Object", "prototype", "__defineSetter__"}, {"Object", "prototype", "__lookupGetter__"}, {"Object", "prototype", "__lookupSetter__"}, {"Object", "prototype", "hasOwnProperty"}, {"Object", "prototype", "isPrototypeOf"}, {"Object", "prototype", "propertyIsEnumerable"}, {"Object", "prototype", "toLocaleString"}, {"Object", "prototype", "toString"}, {"Object", "prototype", "unwatch"}, {"Object", "prototype", "valueOf"}, {"Object", "prototype", "watch"}, // Math: Static properties // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_properties {"Math", "E"}, {"Math", "LN10"}, {"Math", "LN2"}, {"Math", "LOG10E"}, {"Math", "LOG2E"}, {"Math", "PI"}, {"Math", "SQRT1_2"}, {"Math", "SQRT2"}, // Math: Static methods // 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math#Static_methods {"Math", "abs"}, {"Math", "acos"}, {"Math", "acosh"}, {"Math", "asin"}, {"Math", "asinh"}, {"Math", "atan"}, {"Math", "atan2"}, {"Math", "atanh"}, {"Math", "cbrt"}, {"Math", "ceil"}, {"Math", "clz32"}, {"Math", "cos"}, {"Math", "cosh"}, {"Math", "exp"}, {"Math", "expm1"}, {"Math", "floor"}, {"Math", "fround"}, {"Math", "hypot"}, {"Math", "imul"}, {"Math", "log"}, {"Math", "log10"}, {"Math", "log1p"}, {"Math", "log2"}, {"Math", "max"}, {"Math", "min"}, {"Math", "pow"}, {"Math", "random"}, {"Math", "round"}, {"Math", "sign"}, {"Math", "sin"}, {"Math", "sinh"}, {"Math", "sqrt"}, {"Math", "tan"}, {"Math", "tanh"}, {"Math", "trunc"}, // Reflect: Static methods // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Reflect#static_methods {"Reflect", "apply"}, {"Reflect", "construct"}, {"Reflect", "defineProperty"}, {"Reflect", "deleteProperty"}, {"Reflect", "get"}, {"Reflect", "getOwnPropertyDescriptor"}, {"Reflect", "getPrototypeOf"}, {"Reflect", "has"}, {"Reflect", "isExtensible"}, {"Reflect", "ownKeys"}, {"Reflect", "preventExtensions"}, {"Reflect", "set"}, {"Reflect", "setPrototypeOf"}, // Other globals present in both the browser and node (except "eval" because // it has special behavior) {"AbortController"}, {"AbortSignal"}, {"AggregateError"}, {"ArrayBuffer"}, {"BigInt"}, {"DataView"}, {"Date"}, {"Error"}, {"EvalError"}, {"Event"}, {"EventTarget"}, {"Float32Array"}, {"Float64Array"}, {"Int16Array"}, {"Int32Array"}, {"Int8Array"}, {"Intl"}, {"JSON"}, {"Map"}, {"MessageChannel"}, {"MessageEvent"}, {"MessagePort"}, {"Promise"}, {"Proxy"}, {"RangeError"}, {"ReferenceError"}, {"Reflect"}, {"Set"}, {"Symbol"}, {"SyntaxError"}, {"TextDecoder"}, {"TextEncoder"}, {"TypeError"}, {"URIError"}, {"URL"}, {"URLSearchParams"}, {"Uint16Array"}, {"Uint32Array"}, {"Uint8Array"}, {"Uint8ClampedArray"}, {"WeakMap"}, {"WeakSet"}, {"WebAssembly"}, 
{"clearInterval"}, {"clearTimeout"}, {"console"}, {"decodeURI"}, {"decodeURIComponent"}, {"encodeURI"}, {"encodeURIComponent"}, {"escape"}, {"globalThis"}, {"isFinite"}, {"isNaN"}, {"parseFloat"}, {"parseInt"}, {"queueMicrotask"}, {"setInterval"}, {"setTimeout"}, {"unescape"}, // Console method references are assumed to have no side effects // https://developer.mozilla.org/en-US/docs/Web/API/console {"console", "assert"}, {"console", "clear"}, {"console", "count"}, {"console", "countReset"}, {"console", "debug"}, {"console", "dir"}, {"console", "dirxml"}, {"console", "error"}, {"console", "group"}, {"console", "groupCollapsed"}, {"console", "groupEnd"}, {"console", "info"}, {"console", "log"}, {"console", "table"}, {"console", "time"}, {"console", "timeEnd"}, {"console", "timeLog"}, {"console", "trace"}, {"console", "warn"}, // CSSOM APIs {"CSSAnimation"}, {"CSSFontFaceRule"}, {"CSSImportRule"}, {"CSSKeyframeRule"}, {"CSSKeyframesRule"}, {"CSSMediaRule"}, {"CSSNamespaceRule"}, {"CSSPageRule"}, {"CSSRule"}, {"CSSRuleList"}, {"CSSStyleDeclaration"}, {"CSSStyleRule"}, {"CSSStyleSheet"}, {"CSSSupportsRule"}, {"CSSTransition"}, // SVG DOM {"SVGAElement"}, {"SVGAngle"}, {"SVGAnimateElement"}, {"SVGAnimateMotionElement"}, {"SVGAnimateTransformElement"}, {"SVGAnimatedAngle"}, {"SVGAnimatedBoolean"}, {"SVGAnimatedEnumeration"}, {"SVGAnimatedInteger"}, {"SVGAnimatedLength"}, {"SVGAnimatedLengthList"}, {"SVGAnimatedNumber"}, {"SVGAnimatedNumberList"}, {"SVGAnimatedPreserveAspectRatio"}, {"SVGAnimatedRect"}, {"SVGAnimatedString"}, {"SVGAnimatedTransformList"}, {"SVGAnimationElement"}, {"SVGCircleElement"}, {"SVGClipPathElement"}, {"SVGComponentTransferFunctionElement"}, {"SVGDefsElement"}, {"SVGDescElement"}, {"SVGElement"}, {"SVGEllipseElement"}, {"SVGFEBlendElement"}, {"SVGFEColorMatrixElement"}, {"SVGFEComponentTransferElement"}, {"SVGFECompositeElement"}, {"SVGFEConvolveMatrixElement"}, {"SVGFEDiffuseLightingElement"}, {"SVGFEDisplacementMapElement"}, 
{"SVGFEDistantLightElement"}, {"SVGFEDropShadowElement"}, {"SVGFEFloodElement"}, {"SVGFEFuncAElement"}, {"SVGFEFuncBElement"}, {"SVGFEFuncGElement"}, {"SVGFEFuncRElement"}, {"SVGFEGaussianBlurElement"}, {"SVGFEImageElement"}, {"SVGFEMergeElement"}, {"SVGFEMergeNodeElement"}, {"SVGFEMorphologyElement"}, {"SVGFEOffsetElement"}, {"SVGFEPointLightElement"}, {"SVGFESpecularLightingElement"}, {"SVGFESpotLightElement"}, {"SVGFETileElement"}, {"SVGFETurbulenceElement"}, {"SVGFilterElement"}, {"SVGForeignObjectElement"}, {"SVGGElement"}, {"SVGGeometryElement"}, {"SVGGradientElement"}, {"SVGGraphicsElement"}, {"SVGImageElement"}, {"SVGLength"}, {"SVGLengthList"}, {"SVGLineElement"}, {"SVGLinearGradientElement"}, {"SVGMPathElement"}, {"SVGMarkerElement"}, {"SVGMaskElement"}, {"SVGMatrix"}, {"SVGMetadataElement"}, {"SVGNumber"}, {"SVGNumberList"}, {"SVGPathElement"}, {"SVGPatternElement"}, {"SVGPoint"}, {"SVGPointList"}, {"SVGPolygonElement"}, {"SVGPolylineElement"}, {"SVGPreserveAspectRatio"}, {"SVGRadialGradientElement"}, {"SVGRect"}, {"SVGRectElement"}, {"SVGSVGElement"}, {"SVGScriptElement"}, {"SVGSetElement"}, {"SVGStopElement"}, {"SVGStringList"}, {"SVGStyleElement"}, {"SVGSwitchElement"}, {"SVGSymbolElement"}, {"SVGTSpanElement"}, {"SVGTextContentElement"}, {"SVGTextElement"}, {"SVGTextPathElement"}, {"SVGTextPositioningElement"}, {"SVGTitleElement"}, {"SVGTransform"}, {"SVGTransformList"}, {"SVGUnitTypes"}, {"SVGUseElement"}, {"SVGViewElement"}, // Other browser APIs // // This list contains all globals present in modern versions of Chrome, Safari, // and Firefox except for the following properties, since they have a side effect // of triggering layout (https://gist.github.com/paulirish/5d52fb081b3570c81e3a): // // - scrollX // - scrollY // - innerWidth // - innerHeight // - pageXOffset // - pageYOffset // // The following globals have also been removed since they sometimes throw an // exception when accessed, which is a side effect (for more information see // 
https://stackoverflow.com/a/33047477): // // - localStorage // - sessionStorage // {"AnalyserNode"}, {"Animation"}, {"AnimationEffect"}, {"AnimationEvent"}, {"AnimationPlaybackEvent"}, {"AnimationTimeline"}, {"Attr"}, {"Audio"}, {"AudioBuffer"}, {"AudioBufferSourceNode"}, {"AudioDestinationNode"}, {"AudioListener"}, {"AudioNode"}, {"AudioParam"}, {"AudioProcessingEvent"}, {"AudioScheduledSourceNode"}, {"BarProp"}, {"BeforeUnloadEvent"}, {"BiquadFilterNode"}, {"Blob"}, {"BlobEvent"}, {"ByteLengthQueuingStrategy"}, {"CDATASection"}, {"CSS"}, {"CanvasGradient"}, {"CanvasPattern"}, {"CanvasRenderingContext2D"}, {"ChannelMergerNode"}, {"ChannelSplitterNode"}, {"CharacterData"}, {"ClipboardEvent"}, {"CloseEvent"}, {"Comment"}, {"CompositionEvent"}, {"ConvolverNode"}, {"CountQueuingStrategy"}, {"Crypto"}, {"CustomElementRegistry"}, {"CustomEvent"}, {"DOMException"}, {"DOMImplementation"}, {"DOMMatrix"}, {"DOMMatrixReadOnly"}, {"DOMParser"}, {"DOMPoint"}, {"DOMPointReadOnly"}, {"DOMQuad"}, {"DOMRect"}, {"DOMRectList"}, {"DOMRectReadOnly"}, {"DOMStringList"}, {"DOMStringMap"}, {"DOMTokenList"}, {"DataTransfer"}, {"DataTransferItem"}, {"DataTransferItemList"}, {"DelayNode"}, {"Document"}, {"DocumentFragment"}, {"DocumentTimeline"}, {"DocumentType"}, {"DragEvent"}, {"DynamicsCompressorNode"}, {"Element"}, {"ErrorEvent"}, {"EventSource"}, {"File"}, {"FileList"}, {"FileReader"}, {"FocusEvent"}, {"FontFace"}, {"FormData"}, {"GainNode"}, {"Gamepad"}, {"GamepadButton"}, {"GamepadEvent"}, {"Geolocation"}, {"GeolocationPositionError"}, {"HTMLAllCollection"}, {"HTMLAnchorElement"}, {"HTMLAreaElement"}, {"HTMLAudioElement"}, {"HTMLBRElement"}, {"HTMLBaseElement"}, {"HTMLBodyElement"}, {"HTMLButtonElement"}, {"HTMLCanvasElement"}, {"HTMLCollection"}, {"HTMLDListElement"}, {"HTMLDataElement"}, {"HTMLDataListElement"}, {"HTMLDetailsElement"}, {"HTMLDirectoryElement"}, {"HTMLDivElement"}, {"HTMLDocument"}, {"HTMLElement"}, {"HTMLEmbedElement"}, {"HTMLFieldSetElement"}, 
{"HTMLFontElement"}, {"HTMLFormControlsCollection"}, {"HTMLFormElement"}, {"HTMLFrameElement"}, {"HTMLFrameSetElement"}, {"HTMLHRElement"}, {"HTMLHeadElement"}, {"HTMLHeadingElement"}, {"HTMLHtmlElement"}, {"HTMLIFrameElement"}, {"HTMLImageElement"}, {"HTMLInputElement"}, {"HTMLLIElement"}, {"HTMLLabelElement"}, {"HTMLLegendElement"}, {"HTMLLinkElement"}, {"HTMLMapElement"}, {"HTMLMarqueeElement"}, {"HTMLMediaElement"}, {"HTMLMenuElement"}, {"HTMLMetaElement"}, {"HTMLMeterElement"}, {"HTMLModElement"}, {"HTMLOListElement"}, {"HTMLObjectElement"}, {"HTMLOptGroupElement"}, {"HTMLOptionElement"}, {"HTMLOptionsCollection"}, {"HTMLOutputElement"}, {"HTMLParagraphElement"}, {"HTMLParamElement"}, {"HTMLPictureElement"}, {"HTMLPreElement"}, {"HTMLProgressElement"}, {"HTMLQuoteElement"}, {"HTMLScriptElement"}, {"HTMLSelectElement"}, {"HTMLSlotElement"}, {"HTMLSourceElement"}, {"HTMLSpanElement"}, {"HTMLStyleElement"}, {"HTMLTableCaptionElement"}, {"HTMLTableCellElement"}, {"HTMLTableColElement"}, {"HTMLTableElement"}, {"HTMLTableRowElement"}, {"HTMLTableSectionElement"}, {"HTMLTemplateElement"}, {"HTMLTextAreaElement"}, {"HTMLTimeElement"}, {"HTMLTitleElement"}, {"HTMLTrackElement"}, {"HTMLUListElement"}, {"HTMLUnknownElement"}, {"HTMLVideoElement"}, {"HashChangeEvent"}, {"Headers"}, {"History"}, {"IDBCursor"}, {"IDBCursorWithValue"}, {"IDBDatabase"}, {"IDBFactory"}, {"IDBIndex"}, {"IDBKeyRange"}, {"IDBObjectStore"}, {"IDBOpenDBRequest"}, {"IDBRequest"}, {"IDBTransaction"}, {"IDBVersionChangeEvent"}, {"Image"}, {"ImageData"}, {"InputEvent"}, {"IntersectionObserver"}, {"IntersectionObserverEntry"}, {"KeyboardEvent"}, {"KeyframeEffect"}, {"Location"}, {"MediaCapabilities"}, {"MediaElementAudioSourceNode"}, {"MediaEncryptedEvent"}, {"MediaError"}, {"MediaList"}, {"MediaQueryList"}, {"MediaQueryListEvent"}, {"MediaRecorder"}, {"MediaSource"}, {"MediaStream"}, {"MediaStreamAudioDestinationNode"}, {"MediaStreamAudioSourceNode"}, {"MediaStreamTrack"}, {"MediaStreamTrackEvent"}, 
{"MimeType"}, {"MimeTypeArray"}, {"MouseEvent"}, {"MutationEvent"}, {"MutationObserver"}, {"MutationRecord"}, {"NamedNodeMap"}, {"Navigator"}, {"Node"}, {"NodeFilter"}, {"NodeIterator"}, {"NodeList"}, {"Notification"}, {"OfflineAudioCompletionEvent"}, {"Option"}, {"OscillatorNode"}, {"PageTransitionEvent"}, {"Path2D"}, {"Performance"}, {"PerformanceEntry"}, {"PerformanceMark"}, {"PerformanceMeasure"}, {"PerformanceNavigation"}, {"PerformanceObserver"}, {"PerformanceObserverEntryList"}, {"PerformanceResourceTiming"}, {"PerformanceTiming"}, {"PeriodicWave"}, {"Plugin"}, {"PluginArray"}, {"PointerEvent"}, {"PopStateEvent"}, {"ProcessingInstruction"}, {"ProgressEvent"}, {"PromiseRejectionEvent"}, {"RTCCertificate"}, {"RTCDTMFSender"}, {"RTCDTMFToneChangeEvent"}, {"RTCDataChannel"}, {"RTCDataChannelEvent"}, {"RTCIceCandidate"}, {"RTCPeerConnection"}, {"RTCPeerConnectionIceEvent"}, {"RTCRtpReceiver"}, {"RTCRtpSender"}, {"RTCRtpTransceiver"}, {"RTCSessionDescription"}, {"RTCStatsReport"}, {"RTCTrackEvent"}, {"RadioNodeList"}, {"Range"}, {"ReadableStream"}, {"Request"}, {"ResizeObserver"}, {"ResizeObserverEntry"}, {"Response"}, {"Screen"}, {"ScriptProcessorNode"}, {"SecurityPolicyViolationEvent"}, {"Selection"}, {"ShadowRoot"}, {"SourceBuffer"}, {"SourceBufferList"}, {"SpeechSynthesisEvent"}, {"SpeechSynthesisUtterance"}, {"StaticRange"}, {"Storage"}, {"StorageEvent"}, {"StyleSheet"}, {"StyleSheetList"}, {"Text"}, {"TextMetrics"}, {"TextTrack"}, {"TextTrackCue"}, {"TextTrackCueList"}, {"TextTrackList"}, {"TimeRanges"}, {"TrackEvent"}, {"TransitionEvent"}, {"TreeWalker"}, {"UIEvent"}, {"VTTCue"}, {"ValidityState"}, {"VisualViewport"}, {"WaveShaperNode"}, {"WebGLActiveInfo"}, {"WebGLBuffer"}, {"WebGLContextEvent"}, {"WebGLFramebuffer"}, {"WebGLProgram"}, {"WebGLQuery"}, {"WebGLRenderbuffer"}, {"WebGLRenderingContext"}, {"WebGLSampler"}, {"WebGLShader"}, {"WebGLShaderPrecisionFormat"}, {"WebGLSync"}, {"WebGLTexture"}, {"WebGLUniformLocation"}, {"WebKitCSSMatrix"}, 
{"WebSocket"}, {"WheelEvent"}, {"Window"}, {"Worker"}, {"XMLDocument"}, {"XMLHttpRequest"}, {"XMLHttpRequestEventTarget"}, {"XMLHttpRequestUpload"}, {"XMLSerializer"}, {"XPathEvaluator"}, {"XPathExpression"}, {"XPathResult"}, {"XSLTProcessor"}, {"alert"}, {"atob"}, {"blur"}, {"btoa"}, {"cancelAnimationFrame"}, {"captureEvents"}, {"close"}, {"closed"}, {"confirm"}, {"customElements"}, {"devicePixelRatio"}, {"document"}, {"event"}, {"fetch"}, {"find"}, {"focus"}, {"frameElement"}, {"frames"}, {"getComputedStyle"}, {"getSelection"}, {"history"}, {"indexedDB"}, {"isSecureContext"}, {"length"}, {"location"}, {"locationbar"}, {"matchMedia"}, {"menubar"}, {"moveBy"}, {"moveTo"}, {"name"}, {"navigator"}, {"onabort"}, {"onafterprint"}, {"onanimationend"}, {"onanimationiteration"}, {"onanimationstart"}, {"onbeforeprint"}, {"onbeforeunload"}, {"onblur"}, {"oncanplay"}, {"oncanplaythrough"}, {"onchange"}, {"onclick"}, {"oncontextmenu"}, {"oncuechange"}, {"ondblclick"}, {"ondrag"}, {"ondragend"}, {"ondragenter"}, {"ondragleave"}, {"ondragover"}, {"ondragstart"}, {"ondrop"}, {"ondurationchange"}, {"onemptied"}, {"onended"}, {"onerror"}, {"onfocus"}, {"ongotpointercapture"}, {"onhashchange"}, {"oninput"}, {"oninvalid"}, {"onkeydown"}, {"onkeypress"}, {"onkeyup"}, {"onlanguagechange"}, {"onload"}, {"onloadeddata"}, {"onloadedmetadata"}, {"onloadstart"}, {"onlostpointercapture"}, {"onmessage"}, {"onmousedown"}, {"onmouseenter"}, {"onmouseleave"}, {"onmousemove"}, {"onmouseout"}, {"onmouseover"}, {"onmouseup"}, {"onoffline"}, {"ononline"}, {"onpagehide"}, {"onpageshow"}, {"onpause"}, {"onplay"}, {"onplaying"}, {"onpointercancel"}, {"onpointerdown"}, {"onpointerenter"}, {"onpointerleave"}, {"onpointermove"}, {"onpointerout"}, {"onpointerover"}, {"onpointerup"}, {"onpopstate"}, {"onprogress"}, {"onratechange"}, {"onrejectionhandled"}, {"onreset"}, {"onresize"}, {"onscroll"}, {"onseeked"}, {"onseeking"}, {"onselect"}, {"onstalled"}, {"onstorage"}, {"onsubmit"}, {"onsuspend"}, 
{"ontimeupdate"}, {"ontoggle"}, {"ontransitioncancel"}, {"ontransitionend"}, {"ontransitionrun"}, {"ontransitionstart"}, {"onunhandledrejection"}, {"onunload"}, {"onvolumechange"}, {"onwaiting"}, {"onwebkitanimationend"}, {"onwebkitanimationiteration"}, {"onwebkitanimationstart"}, {"onwebkittransitionend"}, {"onwheel"}, {"open"}, {"opener"}, {"origin"}, {"outerHeight"}, {"outerWidth"}, {"parent"}, {"performance"}, {"personalbar"}, {"postMessage"}, {"print"}, {"prompt"}, {"releaseEvents"}, {"requestAnimationFrame"}, {"resizeBy"}, {"resizeTo"}, {"screen"}, {"screenLeft"}, {"screenTop"}, {"screenX"}, {"screenY"}, {"scroll"}, {"scrollBy"}, {"scrollTo"}, {"scrollbars"}, {"self"}, {"speechSynthesis"}, {"status"}, {"statusbar"}, {"stop"}, {"toolbar"}, {"top"}, {"webkitURL"}, {"window"}, } type DefineData struct { // True if accessing this value is known to not have any side effects. For // example, a bare reference to "Object.create" can be removed because it // does not have any observable side effects. CanBeRemovedIfUnused bool // True if a call to this value is known to not have any side effects. For // example, a bare call to "Object()" can be removed because it does not // have any observable side effects. CallCanBeUnwrappedIfUnused bool // If true, the user has indicated that every direct calls to a property on // this object and all of that call's arguments are to be removed from the // output, even when the arguments have side effects. This is used to // implement the "--drop:console" flag. 
MethodCallsMustBeReplacedWithUndefined bool } func mergeDefineData(old DefineData, new DefineData) DefineData { if old.CanBeRemovedIfUnused { new.CanBeRemovedIfUnused = true } if old.CallCanBeUnwrappedIfUnused { new.CallCanBeUnwrappedIfUnused = true } return new } type DotDefine struct { Data DefineData Parts []string } func arePartsEqual(a []string, b []string) bool { if len(a) != len(b) { return false } for i := range a { if a[i] != b[i] { return false } } return true } ================================================ FILE: lib/esbuild/css_ast/css_ast.go ================================================ package css_ast import ( "strconv" "github.com/withastro/compiler/lib/esbuild/ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" "github.com/withastro/compiler/lib/esbuild/helpers" "github.com/withastro/compiler/lib/esbuild/logger" ) // CSS syntax comes in two layers: a minimal syntax that generally accepts // anything that looks vaguely like CSS, and a large set of built-in rules // (the things browsers actually interpret). That way CSS parsers can read // unknown rules and skip over them without having to stop due to errors. // // This AST format is mostly just the minimal syntax. It parses unknown rules // into a tree with enough information that it can write them back out again. // There are some additional layers of syntax including selectors and @-rules // which allow for better pretty-printing and minification. // // Most of the AST just references ranges of the original file by keeping the // original "Token" values around from the lexer. This is a memory-efficient // representation that helps provide good parsing and printing performance. type AST struct { ImportRecords []ast.ImportRecord Rules []Rule SourceMapComment logger.Span ApproximateLineCount int32 } // We create a lot of tokens, so make sure this layout is memory-efficient. // The layout here isn't optimal because it biases for convenience (e.g. 
// "string" could be shorter) but at least the ordering of fields was
// deliberately chosen to minimize size.
type Token struct {
	// Contains the child tokens for component values that are simple blocks.
	// These are either "(", "{", "[", or function tokens. The closing token is
	// implicit and is not stored.
	Children *[]Token // 8 bytes

	// This is the raw contents of the token most of the time. However, it
	// contains the decoded string contents for "TString" tokens.
	Text string // 16 bytes

	// URL tokens have an associated import record at the top-level of the AST.
	// This index points to that import record.
	ImportRecordIndex uint32 // 4 bytes

	// The division between the number and the unit for "TDimension" tokens.
	UnitOffset uint16 // 2 bytes

	// This will never be "TWhitespace" because whitespace isn't stored as a
	// token directly. Instead it is stored in "HasWhitespaceAfter" on the
	// previous token. This is to make it easier to pattern-match against
	// tokens when handling CSS rules, since whitespace almost always doesn't
	// matter. That way you can pattern match against e.g. "rgb(r, g, b)" and
	// not have to handle all possible combinations of embedded whitespace
	// tokens.
	//
	// There is one exception to this: when in verbatim whitespace mode and
	// the token list is non-empty and is only whitespace tokens. In that case
	// a single whitespace token is emitted. This is because otherwise there
	// would be no tokens to attach the whitespace before/after flags to.
	Kind css_lexer.T // 1 byte

	// These flags indicate the presence of a "TWhitespace" token before or after
	// this token. There should be whitespace printed between two tokens if either
	// token indicates that there should be whitespace. Note that whitespace may
	// be altered by processing in certain situations (e.g. minification).
	Whitespace WhitespaceFlags // 1 byte
}

// WhitespaceFlags records whether a "TWhitespace" token appeared immediately
// before and/or after a token (see the "Whitespace" field above).
type WhitespaceFlags uint8

const (
	WhitespaceBefore WhitespaceFlags = 1 << iota
	WhitespaceAfter
)

// Equal reports whether two tokens are identical, including their whitespace
// flags and (recursively) their child tokens.
func (a Token) Equal(b Token) bool {
	if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex && a.Whitespace == b.Whitespace {
		if a.Children == nil && b.Children == nil {
			return true
		}

		if a.Children != nil && b.Children != nil && TokensEqual(*a.Children, *b.Children) {
			return true
		}
	}

	return false
}

// TokensEqual reports whether two token slices are element-wise Equal.
func TokensEqual(a []Token, b []Token) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.Equal(b[i]) {
			return false
		}
	}
	return true
}

// HashTokens folds a token slice into the hash accumulator. Note that only
// the kind, text, and children participate; whitespace flags and import
// record indices are deliberately not hashed (hashes are used as a cheap
// pre-filter before calling Equal).
func HashTokens(hash uint32, tokens []Token) uint32 {
	hash = helpers.HashCombine(hash, uint32(len(tokens)))

	for _, t := range tokens {
		hash = helpers.HashCombine(hash, uint32(t.Kind))
		hash = helpers.HashCombineString(hash, t.Text)
		if t.Children != nil {
			hash = HashTokens(hash, *t.Children)
		}
	}

	return hash
}

// EqualIgnoringWhitespace is like Equal except that the whitespace flags are
// not compared (children are compared with the same relaxation).
func (a Token) EqualIgnoringWhitespace(b Token) bool {
	if a.Kind == b.Kind && a.Text == b.Text && a.ImportRecordIndex == b.ImportRecordIndex {
		if a.Children == nil && b.Children == nil {
			return true
		}

		if a.Children != nil && b.Children != nil && TokensEqualIgnoringWhitespace(*a.Children, *b.Children) {
			return true
		}
	}

	return false
}

// TokensEqualIgnoringWhitespace reports whether two token slices are
// element-wise EqualIgnoringWhitespace.
func TokensEqualIgnoringWhitespace(a []Token, b []Token) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.EqualIgnoringWhitespace(b[i]) {
			return false
		}
	}
	return true
}

// TokensAreCommaSeparated reports whether the slice has the shape
// "value , value , ... , value": an odd length with a "TComma" token at
// every odd index. An empty slice returns false.
func TokensAreCommaSeparated(tokens []Token) bool {
	if n := len(tokens); (n & 1) != 0 {
		for i := 1; i < n; i += 2 {
			if tokens[i].Kind != css_lexer.TComma {
				return false
			}
		}
		return true
	}
	return false
}

// FractionForPercentage converts a "TPercentage" token into a fraction
// clamped to [0, 1] (e.g. "50%" becomes 0.5). The second result is false if
// the token is not a parseable percentage.
func (t Token) FractionForPercentage() (float64, bool) {
	if t.Kind == css_lexer.TPercentage {
		if f, err := strconv.ParseFloat(t.PercentageValue(), 64); err == nil {
			if f < 0 {
				return 0, true
			}
			if f > 100 {
				return 1, true
			}
			return f / 100.0, true
		}
	}
	return 0, false
}

// https://drafts.csswg.org/css-values-3/#lengths
// For zero lengths
// the unit identifier is optional
// (i.e. can be syntactically represented as the <number> 0).
func (t *Token) TurnLengthIntoNumberIfZero() bool {
	if t.Kind == css_lexer.TDimension && t.DimensionValue() == "0" {
		t.Kind = css_lexer.TNumber
		t.Text = "0"
		return true
	}
	return false
}

// TurnLengthOrPercentageIntoNumberIfZero is like TurnLengthIntoNumberIfZero
// but additionally rewrites a zero percentage ("0%") to the number "0".
func (t *Token) TurnLengthOrPercentageIntoNumberIfZero() bool {
	if t.Kind == css_lexer.TPercentage && t.PercentageValue() == "0" {
		t.Kind = css_lexer.TNumber
		t.Text = "0"
		return true
	}
	return t.TurnLengthIntoNumberIfZero()
}

// PercentageValue returns the text of a "TPercentage" token without the
// trailing "%" sign.
func (t Token) PercentageValue() string {
	return t.Text[:len(t.Text)-1]
}

// DimensionValue returns the numeric part of a "TDimension" token (the text
// before UnitOffset).
func (t Token) DimensionValue() string {
	return t.Text[:t.UnitOffset]
}

// DimensionUnit returns the unit part of a "TDimension" token (the text from
// UnitOffset onward).
func (t Token) DimensionUnit() string {
	return t.Text[t.UnitOffset:]
}

func (t Token) DimensionUnitIsSafeLength() bool {
	switch t.DimensionUnit() {
	// These units can be reasonably expected to be supported everywhere.
	// Information used: https://developer.mozilla.org/en-US/docs/Web/CSS/length
	case "cm", "em", "in", "mm", "pc", "pt", "px":
		return true
	}
	return false
}

// IsZero reports whether the token is the number literal "0".
func (t Token) IsZero() bool {
	return t.Kind == css_lexer.TNumber && t.Text == "0"
}

// IsOne reports whether the token is the number literal "1".
func (t Token) IsOne() bool {
	return t.Kind == css_lexer.TNumber && t.Text == "1"
}

// IsAngle reports whether the token is a dimension with one of the CSS angle
// units ("deg", "grad", "rad", "turn").
func (t Token) IsAngle() bool {
	if t.Kind == css_lexer.TDimension {
		unit := t.DimensionUnit()
		return unit == "deg" || unit == "grad" || unit == "rad" || unit == "turn"
	}
	return false
}

// CloneTokensWithImportRecords appends the tokens in "tokensIn" to
// "tokensOut" while cloning the import record referenced by each URL token
// (recursively, including child tokens) into "importRecordsOut" and
// re-pointing the token at the cloned record. Returns the grown output
// slices.
func CloneTokensWithImportRecords(
	tokensIn []Token, importRecordsIn []ast.ImportRecord,
	tokensOut []Token, importRecordsOut []ast.ImportRecord,
) ([]Token, []ast.ImportRecord) {
	for _, t := range tokensIn {
		// If this is a URL token, also clone the import record
		if t.Kind == css_lexer.TURL {
			importRecordIndex := uint32(len(importRecordsOut))
			importRecordsOut = append(importRecordsOut, importRecordsIn[t.ImportRecordIndex])
			t.ImportRecordIndex = importRecordIndex
		}

		// Also search for URL tokens in this token's children
		if t.Children != nil {
			var children []Token
			children, importRecordsOut = CloneTokensWithImportRecords(*t.Children, importRecordsIn, children, importRecordsOut)
			t.Children = &children
		}

		tokensOut = append(tokensOut, t)
	}

	return tokensOut, importRecordsOut
}

// Rule is one CSS rule plus its location in the original source file.
type Rule struct {
	Data R
	Loc  logger.Loc
}

// R is the interface implemented by every rule variant below. Hash's second
// result is false when the rule is not hashable (see RAtImport).
type R interface {
	Equal(rule R) bool
	Hash() (uint32, bool)
}

// RulesEqual reports whether two rule slices are element-wise equal.
func RulesEqual(a []Rule, b []Rule) bool {
	if len(a) != len(b) {
		return false
	}
	for i, c := range a {
		if !c.Data.Equal(b[i].Data) {
			return false
		}
	}
	return true
}

// HashRules folds a rule slice into the hash accumulator. Unhashable rules
// contribute a constant 0 instead of a real hash.
func HashRules(hash uint32, rules []Rule) uint32 {
	hash = helpers.HashCombine(hash, uint32(len(rules)))
	for _, child := range rules {
		if childHash, ok := child.Data.Hash(); ok {
			hash = helpers.HashCombine(hash, childHash)
		} else {
			hash = helpers.HashCombine(hash, 0)
		}
	}
	return hash
}

// RAtCharset is an "@charset" rule.
type RAtCharset struct {
	Encoding string
}

func (a *RAtCharset) Equal(rule R) bool {
	b, ok := rule.(*RAtCharset)
	return ok && a.Encoding == b.Encoding
}

func (r *RAtCharset) Hash() (uint32, bool) {
	hash := uint32(1)
	hash = helpers.HashCombineString(hash, r.Encoding)
	return hash, true
}

// RAtImport is an "@import" rule. It never compares equal to anything and is
// not hashable — presumably so distinct "@import" rules are never treated as
// duplicates of each other (confirm against the printer/minifier callers).
type RAtImport struct {
	ImportConditions  []Token
	ImportRecordIndex uint32
}

func (*RAtImport) Equal(rule R) bool {
	return false
}

func (r *RAtImport) Hash() (uint32, bool) {
	return 0, false
}

// RAtKeyframes is an "@keyframes" rule (AtToken preserves the original
// at-keyword text, e.g. a vendor-prefixed form).
type RAtKeyframes struct {
	AtToken string
	Name    string
	Blocks  []KeyframeBlock
}

// KeyframeBlock is one selector block inside "@keyframes" (each selector is
// kept as a raw string).
type KeyframeBlock struct {
	Selectors []string
	Rules     []Rule
}

func (a *RAtKeyframes) Equal(rule R) bool {
	if b, ok := rule.(*RAtKeyframes); ok && a.AtToken == b.AtToken && a.Name == b.Name && len(a.Blocks) == len(b.Blocks) {
		for i, ai := range a.Blocks {
			bi := b.Blocks[i]
			if len(ai.Selectors) != len(bi.Selectors) {
				return false
			}
			for j, aj := range ai.Selectors {
				if aj != bi.Selectors[j] {
					return false
				}
			}
			if !RulesEqual(ai.Rules, bi.Rules) {
				return false
			}
		}
		return true
	}
	return false
}

func (r *RAtKeyframes) Hash() (uint32, bool) {
	hash := uint32(2)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = helpers.HashCombineString(hash, r.Name)
	hash = helpers.HashCombine(hash,
uint32(len(r.Blocks)))
	for _, block := range r.Blocks {
		hash = helpers.HashCombine(hash, uint32(len(block.Selectors)))
		for _, sel := range block.Selectors {
			hash = helpers.HashCombineString(hash, sel)
		}
		hash = HashRules(hash, block.Rules)
	}
	return hash, true
}

// RKnownAt is a recognized at-rule whose body holds a list of parsed rules.
type RKnownAt struct {
	AtToken string
	Prelude []Token
	Rules   []Rule
}

func (a *RKnownAt) Equal(rule R) bool {
	b, ok := rule.(*RKnownAt)
	// FIX: the rules must be compared against b.Rules. The previous code
	// compared a.Rules with itself ("RulesEqual(a.Rules, a.Rules)"), which is
	// vacuously true and made two at-rules with identical preludes but
	// different bodies compare equal.
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}

func (r *RKnownAt) Hash() (uint32, bool) {
	hash := uint32(3)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

// RUnknownAt is an unrecognized at-rule; its block is kept as raw tokens so
// it can be printed back out verbatim.
type RUnknownAt struct {
	AtToken string
	Prelude []Token
	Block   []Token
}

func (a *RUnknownAt) Equal(rule R) bool {
	b, ok := rule.(*RUnknownAt)
	// FIX: the block must be compared against b.Block. The previous code
	// compared a.Block with itself ("TokensEqual(a.Block, a.Block)"), which is
	// vacuously true and made at-rules with different bodies compare equal.
	return ok && a.AtToken == b.AtToken && TokensEqual(a.Prelude, b.Prelude) && TokensEqual(a.Block, b.Block)
}

func (r *RUnknownAt) Hash() (uint32, bool) {
	hash := uint32(4)
	hash = helpers.HashCombineString(hash, r.AtToken)
	hash = HashTokens(hash, r.Prelude)
	hash = HashTokens(hash, r.Block)
	return hash, true
}

// RSelector is an ordinary style rule: a comma-separated selector list plus
// a body of declarations/nested rules.
type RSelector struct {
	Selectors []ComplexSelector
	Rules     []Rule
	HasAtNest bool
}

func (a *RSelector) Equal(rule R) bool {
	b, ok := rule.(*RSelector)
	if ok && len(a.Selectors) == len(b.Selectors) && a.HasAtNest == b.HasAtNest {
		for i, sel := range a.Selectors {
			if !sel.Equal(b.Selectors[i]) {
				return false
			}
		}
		return RulesEqual(a.Rules, b.Rules)
	}

	return false
}

func (r *RSelector) Hash() (uint32, bool) {
	hash := uint32(5)
	hash = helpers.HashCombine(hash, uint32(len(r.Selectors)))
	for _, complex := range r.Selectors {
		hash = helpers.HashCombine(hash, uint32(len(complex.Selectors)))
		for _, sel := range complex.Selectors {
			if sel.TypeSelector != nil {
				hash = helpers.HashCombineString(hash, sel.TypeSelector.Name.Text)
			} else {
				// Keep a placeholder so "a.b" and ".a.b" hash differently
				hash = helpers.HashCombine(hash, 0)
			}
			hash = helpers.HashCombine(hash, uint32(len(sel.SubclassSelectors)))
			for _, sub := range sel.SubclassSelectors {
				hash = helpers.HashCombine(hash, sub.Hash())
			}
			hash = helpers.HashCombineString(hash, sel.Combinator)
		}
	}
	hash = HashRules(hash, r.Rules)
	return hash, true
}

// RQualified is a qualified rule whose prelude was not parsed as a selector
// list; the prelude is kept as raw tokens.
type RQualified struct {
	Prelude []Token
	Rules   []Rule
}

func (a *RQualified) Equal(rule R) bool {
	b, ok := rule.(*RQualified)
	return ok && TokensEqual(a.Prelude, b.Prelude) && RulesEqual(a.Rules, b.Rules)
}

func (r *RQualified) Hash() (uint32, bool) {
	hash := uint32(6)
	hash = HashTokens(hash, r.Prelude)
	hash = HashRules(hash, r.Rules)
	return hash, true
}

// RDeclaration is a single "property: value" declaration.
type RDeclaration struct {
	KeyText   string
	Value     []Token
	KeyRange  logger.Range
	Key       D // Compare this instead of "KeyText" for speed
	Important bool
}

func (a *RDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RDeclaration)
	return ok && a.KeyText == b.KeyText && TokensEqual(a.Value, b.Value) && a.Important == b.Important
}

func (r *RDeclaration) Hash() (uint32, bool) {
	hash := uint32(7)
	hash = helpers.HashCombine(hash, uint32(r.Key))
	hash = HashTokens(hash, r.Value)
	return hash, true
}

// RBadDeclaration is a declaration that failed to parse; its tokens are kept
// so it can be printed back out unchanged.
type RBadDeclaration struct {
	Tokens []Token
}

func (a *RBadDeclaration) Equal(rule R) bool {
	b, ok := rule.(*RBadDeclaration)
	return ok && TokensEqual(a.Tokens, b.Tokens)
}

func (r *RBadDeclaration) Hash() (uint32, bool) {
	hash := uint32(8)
	hash = HashTokens(hash, r.Tokens)
	return hash, true
}

// RComment is a comment preserved in the rule tree.
type RComment struct {
	Text string
}

func (a *RComment) Equal(rule R) bool {
	b, ok := rule.(*RComment)
	return ok && a.Text == b.Text
}

func (r *RComment) Hash() (uint32, bool) {
	hash := uint32(9)
	hash = helpers.HashCombineString(hash, r.Text)
	return hash, true
}

// RAtLayer is an "@layer" rule: one or more dot-separated layer names plus
// an optional body of rules.
type RAtLayer struct {
	Names [][]string
	Rules []Rule
}

func (a *RAtLayer) Equal(rule R) bool {
	if b, ok := rule.(*RAtLayer); ok && len(a.Names) == len(b.Names) && len(a.Rules) == len(b.Rules) {
		for i, ai := range a.Names {
			bi := b.Names[i]
			if len(ai) != len(bi) {
				return false
			}
			for j, aj := range ai {
				if aj != bi[j] {
					return false
				}
			}
		}
		if !RulesEqual(a.Rules, b.Rules) {
			return false
		}
		// FIX: report equality once every name list and every rule matched.
		// The previous version had no "return true" here and fell through to
		// the final "return false", so two identical "@layer" rules never
		// compared equal.
		return true
	}
	return false
}

func (r *RAtLayer) Hash() (uint32, bool) {
	hash := uint32(10)
	hash = helpers.HashCombine(hash, uint32(len(r.Names)))
	for _, parts := range r.Names {
		hash = helpers.HashCombine(hash, uint32(len(parts)))
		for _, part := range parts {
			hash = helpers.HashCombineString(hash, part)
		}
	}
	hash = HashRules(hash, r.Rules)
	return hash, true
}

// ComplexSelector is a full selector: a sequence of compound selectors
// joined by combinators.
type ComplexSelector struct {
	Selectors []CompoundSelector
}

func (a ComplexSelector) Equal(b ComplexSelector) bool {
	if len(a.Selectors) != len(b.Selectors) {
		return false
	}

	for i, ai := range a.Selectors {
		bi := b.Selectors[i]
		if ai.NestingSelector != bi.NestingSelector || ai.Combinator != bi.Combinator {
			return false
		}

		if ats, bts := ai.TypeSelector, bi.TypeSelector; (ats == nil) != (bts == nil) {
			return false
		} else if ats != nil && bts != nil && !ats.Equal(*bts) {
			return false
		}

		if len(ai.SubclassSelectors) != len(bi.SubclassSelectors) {
			return false
		}
		for j, aj := range ai.SubclassSelectors {
			if !aj.Equal(bi.SubclassSelectors[j]) {
				return false
			}
		}
	}

	return true
}

// NestingSelector records where a CSS nesting "&" appears in a compound
// selector, if at all.
type NestingSelector uint8

const (
	NestingSelectorNone                NestingSelector = iota
	NestingSelectorPrefix                              // "&a {}"
	NestingSelectorPresentButNotPrefix                 // "a& {}"
)

type CompoundSelector struct {
	Combinator        string // Optional, may be ""
	TypeSelector      *NamespacedName
	SubclassSelectors []SS
	NestingSelector   NestingSelector // "&"
}

type NameToken struct {
	Text string
	Kind css_lexer.T
}

type NamespacedName struct {
	// If present, this is an identifier or "*" and is followed by a "|" character
	NamespacePrefix *NameToken

	// This is an identifier or "*"
	Name NameToken
}

func (a NamespacedName) Equal(b NamespacedName) bool {
	return a.Name == b.Name && (a.NamespacePrefix == nil) == (b.NamespacePrefix == nil) &&
		(a.NamespacePrefix == nil || b.NamespacePrefix == nil || *a.NamespacePrefix == *b.NamespacePrefix)
}

// SS is the interface for subclass selectors (#id, .class, [attr], :pseudo).
type SS interface {
	Equal(ss SS) bool
	Hash() uint32
}

// SSHash is an ID selector, e.g. "#foo".
type SSHash struct {
	Name string
}

func (a *SSHash) Equal(ss SS) bool {
	b, ok := ss.(*SSHash)
	return ok && a.Name ==
b.Name } func (ss *SSHash) Hash() uint32 { hash := uint32(1) hash = helpers.HashCombineString(hash, ss.Name) return hash } type SSClass struct { Name string } func (a *SSClass) Equal(ss SS) bool { b, ok := ss.(*SSClass) return ok && a.Name == b.Name } func (ss *SSClass) Hash() uint32 { hash := uint32(2) hash = helpers.HashCombineString(hash, ss.Name) return hash } type SSAttribute struct { MatcherOp string // Either "" or one of: "=" "~=" "|=" "^=" "$=" "*=" MatcherValue string NamespacedName NamespacedName MatcherModifier byte // Either 0 or one of: 'i' 'I' 's' 'S' } func (a *SSAttribute) Equal(ss SS) bool { b, ok := ss.(*SSAttribute) return ok && a.NamespacedName.Equal(b.NamespacedName) && a.MatcherOp == b.MatcherOp && a.MatcherValue == b.MatcherValue && a.MatcherModifier == b.MatcherModifier } func (ss *SSAttribute) Hash() uint32 { hash := uint32(3) hash = helpers.HashCombineString(hash, ss.NamespacedName.Name.Text) hash = helpers.HashCombineString(hash, ss.MatcherOp) hash = helpers.HashCombineString(hash, ss.MatcherValue) return hash } type SSPseudoClass struct { Name string Args []Token IsElement bool // If true, this is prefixed by "::" instead of ":" } func (a *SSPseudoClass) Equal(ss SS) bool { b, ok := ss.(*SSPseudoClass) return ok && a.Name == b.Name && TokensEqual(a.Args, b.Args) && a.IsElement == b.IsElement } func (ss *SSPseudoClass) Hash() uint32 { hash := uint32(4) hash = helpers.HashCombineString(hash, ss.Name) hash = HashTokens(hash, ss.Args) return hash } ================================================ FILE: lib/esbuild/css_ast/css_decl_table.go ================================================ package css_ast import ( "strings" "sync" "github.com/withastro/compiler/lib/esbuild/helpers" ) type D uint16 const ( DUnknown D = iota DAlignContent DAlignItems DAlignSelf DAlignmentBaseline DAll DAnimation DAnimationDelay DAnimationDirection DAnimationDuration DAnimationFillMode DAnimationIterationCount DAnimationName DAnimationPlayState 
DAnimationTimingFunction DBackfaceVisibility DBackground DBackgroundAttachment DBackgroundClip DBackgroundColor DBackgroundImage DBackgroundOrigin DBackgroundPosition DBackgroundPositionX DBackgroundPositionY DBackgroundRepeat DBackgroundSize DBaselineShift DBlockSize DBorder DBorderBlockEnd DBorderBlockEndColor DBorderBlockEndStyle DBorderBlockEndWidth DBorderBlockStart DBorderBlockStartColor DBorderBlockStartStyle DBorderBlockStartWidth DBorderBottom DBorderBottomColor DBorderBottomLeftRadius DBorderBottomRightRadius DBorderBottomStyle DBorderBottomWidth DBorderCollapse DBorderColor DBorderImage DBorderImageOutset DBorderImageRepeat DBorderImageSlice DBorderImageSource DBorderImageWidth DBorderInlineEnd DBorderInlineEndColor DBorderInlineEndStyle DBorderInlineEndWidth DBorderInlineStart DBorderInlineStartColor DBorderInlineStartStyle DBorderInlineStartWidth DBorderLeft DBorderLeftColor DBorderLeftStyle DBorderLeftWidth DBorderRadius DBorderRight DBorderRightColor DBorderRightStyle DBorderRightWidth DBorderSpacing DBorderStyle DBorderTop DBorderTopColor DBorderTopLeftRadius DBorderTopRightRadius DBorderTopStyle DBorderTopWidth DBorderWidth DBottom DBoxShadow DBoxSizing DBreakAfter DBreakBefore DBreakInside DCaptionSide DCaretColor DClear DClip DClipPath DClipRule DColor DColorInterpolation DColorInterpolationFilters DColumnCount DColumnFill DColumnGap DColumnRule DColumnRuleColor DColumnRuleStyle DColumnRuleWidth DColumnSpan DColumnWidth DColumns DContent DCounterIncrement DCounterReset DCssFloat DCssText DCursor DDirection DDisplay DDominantBaseline DEmptyCells DFill DFillOpacity DFillRule DFilter DFlex DFlexBasis DFlexDirection DFlexFlow DFlexGrow DFlexShrink DFlexWrap DFloat DFloodColor DFloodOpacity DFont DFontFamily DFontFeatureSettings DFontKerning DFontSize DFontSizeAdjust DFontStretch DFontStyle DFontSynthesis DFontVariant DFontVariantCaps DFontVariantEastAsian DFontVariantLigatures DFontVariantNumeric DFontVariantPosition DFontWeight DGap 
DGlyphOrientationVertical DGrid DGridArea DGridAutoColumns DGridAutoFlow DGridAutoRows DGridColumn DGridColumnEnd DGridColumnGap DGridColumnStart DGridGap DGridRow DGridRowEnd DGridRowGap DGridRowStart DGridTemplate DGridTemplateAreas DGridTemplateColumns DGridTemplateRows DHeight DHyphens DImageOrientation DImageRendering DInlineSize DInset DJustifyContent DJustifyItems DJustifySelf DLeft DLetterSpacing DLightingColor DLineBreak DLineHeight DListStyle DListStyleImage DListStylePosition DListStyleType DMargin DMarginBlockEnd DMarginBlockStart DMarginBottom DMarginInlineEnd DMarginInlineStart DMarginLeft DMarginRight DMarginTop DMarker DMarkerEnd DMarkerMid DMarkerStart DMask DMaskComposite DMaskImage DMaskPosition DMaskRepeat DMaskSize DMaskType DMaxBlockSize DMaxHeight DMaxInlineSize DMaxWidth DMinBlockSize DMinHeight DMinInlineSize DMinWidth DObjectFit DObjectPosition DOpacity DOrder DOrphans DOutline DOutlineColor DOutlineOffset DOutlineStyle DOutlineWidth DOverflow DOverflowAnchor DOverflowWrap DOverflowX DOverflowY DOverscrollBehavior DOverscrollBehaviorBlock DOverscrollBehaviorInline DOverscrollBehaviorX DOverscrollBehaviorY DPadding DPaddingBlockEnd DPaddingBlockStart DPaddingBottom DPaddingInlineEnd DPaddingInlineStart DPaddingLeft DPaddingRight DPaddingTop DPageBreakAfter DPageBreakBefore DPageBreakInside DPaintOrder DPerspective DPerspectiveOrigin DPlaceContent DPlaceItems DPlaceSelf DPointerEvents DPosition DQuotes DResize DRight DRotate DRowGap DRubyAlign DRubyPosition DScale DScrollBehavior DShapeRendering DStopColor DStopOpacity DStroke DStrokeDasharray DStrokeDashoffset DStrokeLinecap DStrokeLinejoin DStrokeMiterlimit DStrokeOpacity DStrokeWidth DTabSize DTableLayout DTextAlign DTextAlignLast DTextAnchor DTextCombineUpright DTextDecoration DTextDecorationColor DTextDecorationLine DTextDecorationStyle DTextEmphasis DTextEmphasisColor DTextEmphasisPosition DTextEmphasisStyle DTextIndent DTextJustify DTextOrientation DTextOverflow DTextRendering 
DTextShadow DTextTransform DTextUnderlinePosition DTop DTouchAction DTransform DTransformBox DTransformOrigin DTransformStyle DTransition DTransitionDelay DTransitionDuration DTransitionProperty DTransitionTimingFunction DTranslate DUnicodeBidi DUserSelect DVerticalAlign DVisibility DWhiteSpace DWidows DWidth DWillChange DWordBreak DWordSpacing DWordWrap DWritingMode DZIndex DZoom ) var KnownDeclarations = map[string]D{ "align-content": DAlignContent, "align-items": DAlignItems, "align-self": DAlignSelf, "alignment-baseline": DAlignmentBaseline, "all": DAll, "animation": DAnimation, "animation-delay": DAnimationDelay, "animation-direction": DAnimationDirection, "animation-duration": DAnimationDuration, "animation-fill-mode": DAnimationFillMode, "animation-iteration-count": DAnimationIterationCount, "animation-name": DAnimationName, "animation-play-state": DAnimationPlayState, "animation-timing-function": DAnimationTimingFunction, "backface-visibility": DBackfaceVisibility, "background": DBackground, "background-attachment": DBackgroundAttachment, "background-clip": DBackgroundClip, "background-color": DBackgroundColor, "background-image": DBackgroundImage, "background-origin": DBackgroundOrigin, "background-position": DBackgroundPosition, "background-position-x": DBackgroundPositionX, "background-position-y": DBackgroundPositionY, "background-repeat": DBackgroundRepeat, "background-size": DBackgroundSize, "baseline-shift": DBaselineShift, "block-size": DBlockSize, "border": DBorder, "border-block-end": DBorderBlockEnd, "border-block-end-color": DBorderBlockEndColor, "border-block-end-style": DBorderBlockEndStyle, "border-block-end-width": DBorderBlockEndWidth, "border-block-start": DBorderBlockStart, "border-block-start-color": DBorderBlockStartColor, "border-block-start-style": DBorderBlockStartStyle, "border-block-start-width": DBorderBlockStartWidth, "border-bottom": DBorderBottom, "border-bottom-color": DBorderBottomColor, "border-bottom-left-radius": 
DBorderBottomLeftRadius, "border-bottom-right-radius": DBorderBottomRightRadius, "border-bottom-style": DBorderBottomStyle, "border-bottom-width": DBorderBottomWidth, "border-collapse": DBorderCollapse, "border-color": DBorderColor, "border-image": DBorderImage, "border-image-outset": DBorderImageOutset, "border-image-repeat": DBorderImageRepeat, "border-image-slice": DBorderImageSlice, "border-image-source": DBorderImageSource, "border-image-width": DBorderImageWidth, "border-inline-end": DBorderInlineEnd, "border-inline-end-color": DBorderInlineEndColor, "border-inline-end-style": DBorderInlineEndStyle, "border-inline-end-width": DBorderInlineEndWidth, "border-inline-start": DBorderInlineStart, "border-inline-start-color": DBorderInlineStartColor, "border-inline-start-style": DBorderInlineStartStyle, "border-inline-start-width": DBorderInlineStartWidth, "border-left": DBorderLeft, "border-left-color": DBorderLeftColor, "border-left-style": DBorderLeftStyle, "border-left-width": DBorderLeftWidth, "border-radius": DBorderRadius, "border-right": DBorderRight, "border-right-color": DBorderRightColor, "border-right-style": DBorderRightStyle, "border-right-width": DBorderRightWidth, "border-spacing": DBorderSpacing, "border-style": DBorderStyle, "border-top": DBorderTop, "border-top-color": DBorderTopColor, "border-top-left-radius": DBorderTopLeftRadius, "border-top-right-radius": DBorderTopRightRadius, "border-top-style": DBorderTopStyle, "border-top-width": DBorderTopWidth, "border-width": DBorderWidth, "bottom": DBottom, "box-shadow": DBoxShadow, "box-sizing": DBoxSizing, "break-after": DBreakAfter, "break-before": DBreakBefore, "break-inside": DBreakInside, "caption-side": DCaptionSide, "caret-color": DCaretColor, "clear": DClear, "clip": DClip, "clip-path": DClipPath, "clip-rule": DClipRule, "color": DColor, "color-interpolation": DColorInterpolation, "color-interpolation-filters": DColorInterpolationFilters, "column-count": DColumnCount, "column-fill": 
DColumnFill, "column-gap": DColumnGap, "column-rule": DColumnRule, "column-rule-color": DColumnRuleColor, "column-rule-style": DColumnRuleStyle, "column-rule-width": DColumnRuleWidth, "column-span": DColumnSpan, "column-width": DColumnWidth, "columns": DColumns, "content": DContent, "counter-increment": DCounterIncrement, "counter-reset": DCounterReset, "css-float": DCssFloat, "css-text": DCssText, "cursor": DCursor, "direction": DDirection, "display": DDisplay, "dominant-baseline": DDominantBaseline, "empty-cells": DEmptyCells, "fill": DFill, "fill-opacity": DFillOpacity, "fill-rule": DFillRule, "filter": DFilter, "flex": DFlex, "flex-basis": DFlexBasis, "flex-direction": DFlexDirection, "flex-flow": DFlexFlow, "flex-grow": DFlexGrow, "flex-shrink": DFlexShrink, "flex-wrap": DFlexWrap, "float": DFloat, "flood-color": DFloodColor, "flood-opacity": DFloodOpacity, "font": DFont, "font-family": DFontFamily, "font-feature-settings": DFontFeatureSettings, "font-kerning": DFontKerning, "font-size": DFontSize, "font-size-adjust": DFontSizeAdjust, "font-stretch": DFontStretch, "font-style": DFontStyle, "font-synthesis": DFontSynthesis, "font-variant": DFontVariant, "font-variant-caps": DFontVariantCaps, "font-variant-east-asian": DFontVariantEastAsian, "font-variant-ligatures": DFontVariantLigatures, "font-variant-numeric": DFontVariantNumeric, "font-variant-position": DFontVariantPosition, "font-weight": DFontWeight, "gap": DGap, "glyph-orientation-vertical": DGlyphOrientationVertical, "grid": DGrid, "grid-area": DGridArea, "grid-auto-columns": DGridAutoColumns, "grid-auto-flow": DGridAutoFlow, "grid-auto-rows": DGridAutoRows, "grid-column": DGridColumn, "grid-column-end": DGridColumnEnd, "grid-column-gap": DGridColumnGap, "grid-column-start": DGridColumnStart, "grid-gap": DGridGap, "grid-row": DGridRow, "grid-row-end": DGridRowEnd, "grid-row-gap": DGridRowGap, "grid-row-start": DGridRowStart, "grid-template": DGridTemplate, "grid-template-areas": DGridTemplateAreas, 
"grid-template-columns": DGridTemplateColumns, "grid-template-rows": DGridTemplateRows, "height": DHeight, "hyphens": DHyphens, "image-orientation": DImageOrientation, "image-rendering": DImageRendering, "inline-size": DInlineSize, "inset": DInset, "justify-content": DJustifyContent, "justify-items": DJustifyItems, "justify-self": DJustifySelf, "left": DLeft, "letter-spacing": DLetterSpacing, "lighting-color": DLightingColor, "line-break": DLineBreak, "line-height": DLineHeight, "list-style": DListStyle, "list-style-image": DListStyleImage, "list-style-position": DListStylePosition, "list-style-type": DListStyleType, "margin": DMargin, "margin-block-end": DMarginBlockEnd, "margin-block-start": DMarginBlockStart, "margin-bottom": DMarginBottom, "margin-inline-end": DMarginInlineEnd, "margin-inline-start": DMarginInlineStart, "margin-left": DMarginLeft, "margin-right": DMarginRight, "margin-top": DMarginTop, "marker": DMarker, "marker-end": DMarkerEnd, "marker-mid": DMarkerMid, "marker-start": DMarkerStart, "mask": DMask, "mask-composite": DMaskComposite, "mask-image": DMaskImage, "mask-position": DMaskPosition, "mask-repeat": DMaskRepeat, "mask-size": DMaskSize, "mask-type": DMaskType, "max-block-size": DMaxBlockSize, "max-height": DMaxHeight, "max-inline-size": DMaxInlineSize, "max-width": DMaxWidth, "min-block-size": DMinBlockSize, "min-height": DMinHeight, "min-inline-size": DMinInlineSize, "min-width": DMinWidth, "object-fit": DObjectFit, "object-position": DObjectPosition, "opacity": DOpacity, "order": DOrder, "orphans": DOrphans, "outline": DOutline, "outline-color": DOutlineColor, "outline-offset": DOutlineOffset, "outline-style": DOutlineStyle, "outline-width": DOutlineWidth, "overflow": DOverflow, "overflow-anchor": DOverflowAnchor, "overflow-wrap": DOverflowWrap, "overflow-x": DOverflowX, "overflow-y": DOverflowY, "overscroll-behavior": DOverscrollBehavior, "overscroll-behavior-block": DOverscrollBehaviorBlock, "overscroll-behavior-inline": 
DOverscrollBehaviorInline, "overscroll-behavior-x": DOverscrollBehaviorX, "overscroll-behavior-y": DOverscrollBehaviorY, "padding": DPadding, "padding-block-end": DPaddingBlockEnd, "padding-block-start": DPaddingBlockStart, "padding-bottom": DPaddingBottom, "padding-inline-end": DPaddingInlineEnd, "padding-inline-start": DPaddingInlineStart, "padding-left": DPaddingLeft, "padding-right": DPaddingRight, "padding-top": DPaddingTop, "page-break-after": DPageBreakAfter, "page-break-before": DPageBreakBefore, "page-break-inside": DPageBreakInside, "paint-order": DPaintOrder, "perspective": DPerspective, "perspective-origin": DPerspectiveOrigin, "place-content": DPlaceContent, "place-items": DPlaceItems, "place-self": DPlaceSelf, "pointer-events": DPointerEvents, "position": DPosition, "quotes": DQuotes, "resize": DResize, "right": DRight, "rotate": DRotate, "row-gap": DRowGap, "ruby-align": DRubyAlign, "ruby-position": DRubyPosition, "scale": DScale, "scroll-behavior": DScrollBehavior, "shape-rendering": DShapeRendering, "stop-color": DStopColor, "stop-opacity": DStopOpacity, "stroke": DStroke, "stroke-dasharray": DStrokeDasharray, "stroke-dashoffset": DStrokeDashoffset, "stroke-linecap": DStrokeLinecap, "stroke-linejoin": DStrokeLinejoin, "stroke-miterlimit": DStrokeMiterlimit, "stroke-opacity": DStrokeOpacity, "stroke-width": DStrokeWidth, "tab-size": DTabSize, "table-layout": DTableLayout, "text-align": DTextAlign, "text-align-last": DTextAlignLast, "text-anchor": DTextAnchor, "text-combine-upright": DTextCombineUpright, "text-decoration": DTextDecoration, "text-decoration-color": DTextDecorationColor, "text-decoration-line": DTextDecorationLine, "text-decoration-style": DTextDecorationStyle, "text-emphasis": DTextEmphasis, "text-emphasis-color": DTextEmphasisColor, "text-emphasis-position": DTextEmphasisPosition, "text-emphasis-style": DTextEmphasisStyle, "text-indent": DTextIndent, "text-justify": DTextJustify, "text-orientation": DTextOrientation, "text-overflow": 
DTextOverflow, "text-rendering": DTextRendering, "text-shadow": DTextShadow, "text-transform": DTextTransform, "text-underline-position": DTextUnderlinePosition, "top": DTop, "touch-action": DTouchAction, "transform": DTransform, "transform-box": DTransformBox, "transform-origin": DTransformOrigin, "transform-style": DTransformStyle, "transition": DTransition, "transition-delay": DTransitionDelay, "transition-duration": DTransitionDuration, "transition-property": DTransitionProperty, "transition-timing-function": DTransitionTimingFunction, "translate": DTranslate, "unicode-bidi": DUnicodeBidi, "user-select": DUserSelect, "vertical-align": DVerticalAlign, "visibility": DVisibility, "white-space": DWhiteSpace, "widows": DWidows, "width": DWidth, "will-change": DWillChange, "word-break": DWordBreak, "word-spacing": DWordSpacing, "word-wrap": DWordWrap, "writing-mode": DWritingMode, "z-index": DZIndex, "zoom": DZoom, } var typoDetector *helpers.TypoDetector var typoDetectorMutex sync.Mutex func MaybeCorrectDeclarationTypo(text string) (string, bool) { // Ignore CSS variables, which should not be corrected to CSS properties if strings.HasPrefix(text, "--") { return "", false } typoDetectorMutex.Lock() defer typoDetectorMutex.Unlock() // Lazily-initialize the typo detector for speed when it's not needed if typoDetector == nil { valid := make([]string, 0, len(KnownDeclarations)) for key := range KnownDeclarations { valid = append(valid, key) } detector := helpers.MakeTypoDetector(valid) typoDetector = &detector } return typoDetector.MaybeCorrectTypo(text) } ================================================ FILE: lib/esbuild/css_lexer/css_lexer.go ================================================ package css_lexer import ( "strings" "unicode/utf8" "github.com/withastro/compiler/lib/esbuild/helpers" "github.com/withastro/compiler/lib/esbuild/logger" ) // The lexer converts a source file to a stream of tokens. 
Unlike esbuild's // JavaScript lexer, this CSS lexer runs to completion before the CSS parser // begins, resulting in a single array of all tokens in the file. type T uint8 const eof = -1 const ( TEndOfFile T = iota TAtKeyword TBadString TBadURL TCDC // "-->" TCDO // "<!--" TCloseBrace TCloseBracket TCloseParen TColon TComma TDelim TDelimAmpersand TDelimAsterisk TDelimBar TDelimCaret TDelimDollar TDelimDot TDelimEquals TDelimExclamation TDelimGreaterThan TDelimMinus TDelimPlus TDelimSlash TDelimTilde TDimension TFunction THash TIdent TNumber TOpenBrace TOpenBracket TOpenParen TPercentage TSemicolon TString TURL TWhitespace ) var tokenToString = []string{ "end of file", "@-keyword", "bad string token", "bad URL token", "\"-->\"", "\"<!--\"", "\"}\"", "\"]\"", "\")\"", "\":\"", "\",\"", "delimiter", "\"&\"", "\"*\"", "\"|\"", "\"^\"", "\"$\"", "\".\"", "\"=\"", "\"!\"", "\">\"", "\"-\"", "\"+\"", "\"/\"", "\"~\"", "dimension", "function token", "hash token", "identifier", "number", "\"{\"", "\"[\"", "\"(\"", "percentage", "\";\"", "string token", "URL token", "whitespace", } func (t T) String() string { return tokenToString[t] } func (t T) IsNumeric() bool { return t == TNumber || t == TPercentage || t == TDimension } type TokenFlags uint8 const ( IsID TokenFlags = 1 << iota DidWarnAboutSingleLineComment ) // This token struct is designed to be memory-efficient. It just references a // range in the input file instead of directly containing the substring of text // since a range takes up less memory than a string. 
type Token struct {
	Range      logger.Range // 8 bytes
	UnitOffset uint16       // 2 bytes
	Kind       T            // 1 byte
	Flags      TokenFlags   // 1 byte
}

// DecodedText returns the semantic text of the token given the full source
// contents: delimiters are stripped per token kind (e.g. the "@" of an
// at-keyword, the quotes of a string, the "url(" and ")" of a URL) and any
// backslash escapes are decoded.
func (token Token) DecodedText(contents string) string {
	raw := contents[token.Range.Loc.Start:token.Range.End()]

	switch token.Kind {
	case TIdent, TDimension:
		return decodeEscapesInToken(raw)

	case TAtKeyword, THash:
		// Drop the leading "@" or "#"
		return decodeEscapesInToken(raw[1:])

	case TFunction:
		// Drop the trailing "("
		return decodeEscapesInToken(raw[:len(raw)-1])

	case TString:
		// Drop the surrounding quotes
		return decodeEscapesInToken(raw[1 : len(raw)-1])

	case TURL:
		// Drop "url(" and ")"
		start := 4
		end := len(raw) - 1

		// Trim leading and trailing whitespace
		for start < end && isWhitespace(rune(raw[start])) {
			start++
		}
		for start < end && isWhitespace(rune(raw[end-1])) {
			end--
		}

		return decodeEscapesInToken(raw[start:end])
	}

	return raw
}

// lexer holds all mutable state of the tokenizer while it scans the source.
type lexer struct {
	log                     logger.Log
	source                  logger.Source
	legalCommentsBefore     []Comment   // legal comments seen since the last emitted token
	sourceMappingURL        logger.Span // contents of a "sourceMappingURL=" comment, if any
	tracker                 logger.LineColumnTracker
	approximateNewlineCount int
	current                 int        // byte offset just past codePoint
	oldSingleLineCommentEnd logger.Loc // end of the last "//" run already warned about
	codePoint               rune       // current decoded code point (or eof)
	Token                   Token      // token currently being built
}

// Comment is a "legal" comment (e.g. /*! ... */ or @license/@preserve) that
// must be preserved in the output.
type Comment struct {
	Text            string
	Loc             logger.Loc
	TokenIndexAfter uint32 // index of the token that follows this comment
}

// TokenizeResult is everything Tokenize extracts from one source file.
type TokenizeResult struct {
	Tokens               []Token
	LegalComments        []Comment
	SourceMapComment     logger.Span
	ApproximateLineCount int32
}

// Tokenize scans the entire source file up front and returns the complete
// token stream plus any legal comments and source-map comment it found.
func Tokenize(log logger.Log, source logger.Source) TokenizeResult {
	lexer := lexer{
		log:     log,
		source:  source,
		tracker: logger.MakeLineColumnTracker(&source),
	}
	lexer.step()

	// The U+FEFF character is usually a zero-width non-breaking space. However,
	// when it's used at the start of a text stream it is called a BOM (byte order
	// mark) instead and indicates that the text stream is UTF-8 encoded. This is
	// problematic for us because CSS does not treat U+FEFF as whitespace. Only
	// " \t\r\n\f" characters are treated as whitespace. Skip over the BOM if it
	// is present so it doesn't cause us trouble when we try to parse it.
	if lexer.codePoint == '\uFEFF' {
		lexer.step()
	}

	lexer.next()
	var tokens []Token
	var comments []Comment
	for lexer.Token.Kind != TEndOfFile {
		// Attach any legal comments that appeared before this token
		if lexer.legalCommentsBefore != nil {
			for _, comment := range lexer.legalCommentsBefore {
				comment.TokenIndexAfter = uint32(len(tokens))
				comments = append(comments, comment)
			}
			lexer.legalCommentsBefore = nil
		}
		tokens = append(tokens, lexer.Token)
		lexer.next()
	}

	// Flush legal comments that trail the final token
	if lexer.legalCommentsBefore != nil {
		for _, comment := range lexer.legalCommentsBefore {
			comment.TokenIndexAfter = uint32(len(tokens))
			comments = append(comments, comment)
		}
		lexer.legalCommentsBefore = nil
	}

	return TokenizeResult{
		Tokens:               tokens,
		LegalComments:        comments,
		ApproximateLineCount: int32(lexer.approximateNewlineCount) + 1,
		SourceMapComment:     lexer.sourceMappingURL,
	}
}

// step advances the lexer by one code point, updating the current token's
// length and the newline count as it goes.
func (lexer *lexer) step() {
	codePoint, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])

	// Use -1 to indicate the end of the file
	if width == 0 {
		codePoint = eof
	}

	// Track the approximate number of newlines in the file so we can preallocate
	// the line offset table in the printer for source maps. The line offset table
	// is the #1 highest allocation in the heap profile, so this is worth doing.
	// This count is approximate because it handles "\n" and "\r\n" (the common
	// cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
	// because it's only a preallocation. The array will just grow if it's too small.
	if codePoint == '\n' {
		lexer.approximateNewlineCount++
	}

	lexer.codePoint = codePoint
	lexer.Token.Range.Len = int32(lexer.current) - lexer.Token.Range.Loc.Start
	lexer.current += width
}

// next scans the next token into lexer.Token, following the CSS tokenization
// algorithm. Multi-line comments are skipped (they do not produce tokens).
func (lexer *lexer) next() {
	// Reference: https://www.w3.org/TR/css-syntax-3/
	for {
		// Start the new token where the previous one ended
		lexer.Token = Token{Range: logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}}}

		switch lexer.codePoint {
		case eof:
			lexer.Token.Kind = TEndOfFile

		case '/':
			lexer.step()
			switch lexer.codePoint {
			case '*':
				// "/*" comment: consume it and restart the loop for a real token
				lexer.step()
				lexer.consumeToEndOfMultiLineComment(lexer.Token.Range)
				continue
			case '/':
				// Warn when people use "//" comments, which are invalid in CSS
				loc := lexer.Token.Range.Loc
				if loc.Start >= lexer.oldSingleLineCommentEnd.Start {
					contents := lexer.source.Contents
					end := lexer.current
					for end < len(contents) && !isNewline(rune(contents[end])) {
						end++
					}
					lexer.log.Add(logger.Warning, &lexer.tracker, logger.Range{Loc: loc, Len: 2},
						"Comments in CSS use \"/* ... */\" instead of \"//\"")
					lexer.oldSingleLineCommentEnd.Start = int32(end)
					lexer.Token.Flags |= DidWarnAboutSingleLineComment
				}
			}
			lexer.Token.Kind = TDelimSlash

		case ' ', '\t', '\n', '\r', '\f':
			// Coalesce a run of whitespace (and any embedded comments) into one token
			lexer.step()
			for {
				if isWhitespace(lexer.codePoint) {
					lexer.step()
				} else if lexer.codePoint == '/' && lexer.current < len(lexer.source.Contents) && lexer.source.Contents[lexer.current] == '*' {
					startRange := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 2}
					lexer.step()
					lexer.step()
					lexer.consumeToEndOfMultiLineComment(startRange)
				} else {
					break
				}
			}
			lexer.Token.Kind = TWhitespace

		case '"', '\'':
			lexer.Token.Kind = lexer.consumeString()

		case '#':
			lexer.step()
			if IsNameContinue(lexer.codePoint) || lexer.isValidEscape() {
				lexer.Token.Kind = THash
				if lexer.wouldStartIdentifier() {
					lexer.Token.Flags |= IsID
				}
				lexer.consumeName()
			} else {
				lexer.Token.Kind = TDelim
			}

		case '(':
			lexer.step()
			lexer.Token.Kind = TOpenParen

		case ')':
			lexer.step()
			lexer.Token.Kind = TCloseParen

		case '[':
			lexer.step()
			lexer.Token.Kind = TOpenBracket

		case ']':
			lexer.step()
			lexer.Token.Kind = TCloseBracket

		case '{':
			lexer.step()
			lexer.Token.Kind = TOpenBrace

		case '}':
			lexer.step()
			lexer.Token.Kind = TCloseBrace

		case ',':
			lexer.step()
			lexer.Token.Kind = TComma

		case ':':
			lexer.step()
			lexer.Token.Kind = TColon

		case ';':
			lexer.step()
			lexer.Token.Kind = TSemicolon

		case '+':
			// "+" may start a signed number
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimPlus
			}

		case '.':
			// "." may start a fractional number like ".5"
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimDot
			}

		case '-':
			// "-" may start a number, "-->" (CDC), or an identifier
			if lexer.wouldStartNumber() {
				lexer.Token.Kind = lexer.consumeNumeric()
			} else if lexer.current+2 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+2] == "->" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDC
			} else if lexer.wouldStartIdentifier() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelimMinus
			}

		case '<':
			// "<!--" (CDO) or a plain delimiter
			if lexer.current+3 <= len(lexer.source.Contents) && lexer.source.Contents[lexer.current:lexer.current+3] == "!--" {
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.step()
				lexer.Token.Kind = TCDO
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}

		case '@':
			lexer.step()
			if lexer.wouldStartIdentifier() {
				lexer.consumeName()
				lexer.Token.Kind = TAtKeyword
			} else {
				lexer.Token.Kind = TDelim
			}

		case '\\':
			if lexer.isValidEscape() {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.log.Add(logger.Error, &lexer.tracker, lexer.Token.Range, "Invalid escape")
				lexer.Token.Kind = TDelim
			}

		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			lexer.Token.Kind = lexer.consumeNumeric()

		case '>':
			lexer.step()
			lexer.Token.Kind = TDelimGreaterThan

		case '~':
			lexer.step()
			lexer.Token.Kind = TDelimTilde

		case '&':
			lexer.step()
			lexer.Token.Kind = TDelimAmpersand

		case '*':
			lexer.step()
			lexer.Token.Kind = TDelimAsterisk

		case '|':
			lexer.step()
			lexer.Token.Kind = TDelimBar

		case '!':
			lexer.step()
			lexer.Token.Kind = TDelimExclamation

		case '=':
			lexer.step()
			lexer.Token.Kind = TDelimEquals

		case '^':
			lexer.step()
			lexer.Token.Kind = TDelimCaret

		case '$':
			lexer.step()
			lexer.Token.Kind = TDelimDollar

		default:
			if IsNameStart(lexer.codePoint) {
				lexer.Token.Kind = lexer.consumeIdentLike()
			} else {
				lexer.step()
				lexer.Token.Kind = TDelim
			}
		}

		return
	}
}

// consumeToEndOfMultiLineComment scans to the closing "*/" of a comment whose
// opening "/*" has already been consumed. As a side effect it records any
// "sourceMappingURL=" directive and collects legal comments ("/*!" or ones
// containing @preserve/@license). startRange is the range of the opening "/*",
// used for both the legal-comment location and the unterminated-comment note.
func (lexer *lexer) consumeToEndOfMultiLineComment(startRange logger.Range) {
	startOfSourceMappingURL := 0
	isLegalComment := false

	switch lexer.codePoint {
	case '#', '@':
		// Keep track of the contents of the "sourceMappingURL=" comment
		if strings.HasPrefix(lexer.source.Contents[lexer.current:], " sourceMappingURL=") {
			startOfSourceMappingURL = lexer.current + len(" sourceMappingURL=")
		}
	case '!':
		// Remember if this is a legal comment
		isLegalComment = true
	}

	for {
		switch lexer.codePoint {
		case '*':
			endOfSourceMappingURL := lexer.current - 1
			lexer.step()
			if lexer.codePoint == '/' {
				commentEnd := lexer.current
				lexer.step()

				// Record the source mapping URL
				if startOfSourceMappingURL != 0 {
					r := logger.Range{Loc: logger.Loc{Start: int32(startOfSourceMappingURL)}}
					text := lexer.source.Contents[startOfSourceMappingURL:endOfSourceMappingURL]
					// The URL runs until the first whitespace character
					for int(r.Len) < len(text) && !isWhitespace(rune(text[r.Len])) {
						r.Len++
					}
					lexer.sourceMappingURL = logger.Span{Text: text[:r.Len], Range: r}
				}

				// Record legal comments
				if text := lexer.source.Contents[startRange.Loc.Start:commentEnd]; isLegalComment || containsAtPreserveOrAtLicense(text) {
					text = helpers.RemoveMultiLineCommentIndent(lexer.source.Contents[:startRange.Loc.Start], text)
					lexer.legalCommentsBefore = append(lexer.legalCommentsBefore, Comment{Loc: startRange.Loc, Text: text})
				}
				return
			}

		case eof: // This indicates the end of the file
			lexer.log.AddWithNotes(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Expected \"*/\" to terminate multi-line comment",
				[]logger.MsgData{lexer.tracker.MsgData(startRange, "The multi-line comment starts here:")})
			return

		default:
			lexer.step()
		}
	}
}

// containsAtPreserveOrAtLicense reports whether the comment text contains an
// "@preserve" or "@license" annotation, which marks it as a legal comment.
func containsAtPreserveOrAtLicense(text string) bool {
	for i, c := range text {
		if c == '@' && (strings.HasPrefix(text[i+1:], "preserve") || strings.HasPrefix(text[i+1:], "license")) {
			return true
		}
	}
	return false
}

// isValidEscape reports whether the current position starts a valid CSS
// escape: a backslash not followed by a newline.
func (lexer *lexer) isValidEscape() bool {
	if lexer.codePoint != '\\' {
		return false
	}
	c, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
	return !isNewline(c)
}

// wouldStartIdentifier reports whether the input at the current position
// would begin an identifier (name start, "-" followed by a name/"-"/escape,
// or a valid escape).
func (lexer *lexer) wouldStartIdentifier() bool {
	if IsNameStart(lexer.codePoint) {
		return true
	}

	if lexer.codePoint == '-' {
		c, width := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current:])
		if c == utf8.RuneError && width <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c) || c == '-' {
			return true
		}
		if c == '\\' {
			c2, _ := utf8.DecodeRuneInString(lexer.source.Contents[lexer.current+width:])
			return !isNewline(c2)
		}
		return false
	}

	return lexer.isValidEscape()
}

// WouldStartIdentifierWithoutEscapes is the standalone version of
// wouldStartIdentifier that inspects a plain string and does not consider
// backslash escapes.
func WouldStartIdentifierWithoutEscapes(text string) bool {
	c, width := utf8.DecodeRuneInString(text)
	if c == utf8.RuneError && width <= 1 {
		return false // Decoding error
	}
	if IsNameStart(c) {
		return true
	}

	if c == '-' {
		c2, width2 := utf8.DecodeRuneInString(text[width:])
		if c2 == utf8.RuneError && width2 <= 1 {
			return false // Decoding error
		}
		if IsNameStart(c2) || c2 == '-' {
			return true
		}
	}
	return false
}

// wouldStartNumber reports whether the input at the current position would
// begin a numeric token: a digit, "." followed by a digit, or a sign
// followed by a digit or ".digit".
func (lexer *lexer) wouldStartNumber() bool {
	if lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		return true
	} else if lexer.codePoint == '.' {
		contents := lexer.source.Contents
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			return c >= '0' && c <= '9'
		}
	} else if lexer.codePoint == '+' || lexer.codePoint == '-' {
		contents := lexer.source.Contents
		n := len(contents)
		if lexer.current < n {
			c := contents[lexer.current]
			if c >= '0' && c <= '9' {
				return true
			}
			if c == '.'
				&& lexer.current+1 < n {
				c = contents[lexer.current+1]
				return c >= '0' && c <= '9'
			}
		}
	}
	return false
}

// consumeName scans an identifier-like name starting at the current token and
// returns its decoded text. The fast path returns a substring of the input;
// escapes force an allocation via strings.Builder.
func (lexer *lexer) consumeName() string {
	// Common case: no escapes, identifier is a substring of the input
	for IsNameContinue(lexer.codePoint) {
		lexer.step()
	}
	raw := lexer.source.Contents[lexer.Token.Range.Loc.Start:lexer.Token.Range.End()]
	if !lexer.isValidEscape() {
		return raw
	}

	// Uncommon case: escapes, identifier is allocated
	sb := strings.Builder{}
	sb.WriteString(raw)
	sb.WriteRune(lexer.consumeEscape())
	for {
		if IsNameContinue(lexer.codePoint) {
			sb.WriteRune(lexer.codePoint)
			lexer.step()
		} else if lexer.isValidEscape() {
			sb.WriteRune(lexer.consumeEscape())
		} else {
			break
		}
	}
	return sb.String()
}

// consumeEscape decodes one backslash escape at the current position and
// returns the resulting rune. Hex escapes take up to six digits plus one
// optional trailing whitespace character; out-of-range or surrogate values
// become U+FFFD.
func (lexer *lexer) consumeEscape() rune {
	lexer.step() // Skip the backslash
	c := lexer.codePoint

	if hex, ok := isHex(c); ok {
		lexer.step()

		// Up to five more hex digits (six total)
		for i := 0; i < 5; i++ {
			if next, ok := isHex(lexer.codePoint); ok {
				lexer.step()
				hex = hex*16 + next
			} else {
				break
			}
		}

		// One whitespace character may terminate the escape
		if isWhitespace(lexer.codePoint) {
			lexer.step()
		}
		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			return utf8.RuneError
		}
		return rune(hex)
	}

	if c == eof {
		return utf8.RuneError
	}

	lexer.step()
	return c
}

// consumeIdentLike scans an identifier, function token, or unquoted URL.
// "url(" not followed by a quote switches to URL scanning.
func (lexer *lexer) consumeIdentLike() T {
	name := lexer.consumeName()

	if lexer.codePoint == '(' {
		lexer.step()
		if len(name) == 3 {
			u, r, l := name[0], name[1], name[2]
			if (u == 'u' || u == 'U') && (r == 'r' || r == 'R') && (l == 'l' || l == 'L') {
				for isWhitespace(lexer.codePoint) {
					lexer.step()
				}
				if lexer.codePoint != '"' && lexer.codePoint != '\'' {
					return lexer.consumeURL()
				}
			}
		}
		return TFunction
	}

	return TIdent
}

// consumeURL scans an unquoted url(...) token. On any malformed content it
// logs an error and falls through to consume the remnants as TBadURL.
func (lexer *lexer) consumeURL() T {
validURL:
	for {
		switch lexer.codePoint {
		case ')':
			lexer.step()
			return TURL

		case eof:
			loc := logger.Loc{Start: lexer.Token.Range.End()}
			lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
			return TBadURL

		case ' ', '\t', '\n', '\r', '\f':
			// Whitespace is only valid immediately before the closing ")"
			lexer.step()
			for isWhitespace(lexer.codePoint) {
				lexer.step()
			}
			if lexer.codePoint != ')' {
				loc := logger.Loc{Start: lexer.Token.Range.End()}
				lexer.log.Add(logger.Error, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
				break validURL
			}
			lexer.step()
			return TURL

		case '"', '\'', '(':
			r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
			lexer.log.Add(logger.Error, &lexer.tracker, r, "Expected \")\" to end URL token")
			break validURL

		case '\\':
			if !lexer.isValidEscape() {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Invalid escape")
				break validURL
			}
			lexer.consumeEscape()

		default:
			if isNonPrintable(lexer.codePoint) {
				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
				lexer.log.Add(logger.Error, &lexer.tracker, r, "Unexpected non-printable character in URL token")
			}
			lexer.step()
		}
	}

	// Consume the remnants of a bad url
	for {
		switch lexer.codePoint {
		case ')', eof:
			lexer.step()
			return TBadURL

		case '\\':
			if lexer.isValidEscape() {
				lexer.consumeEscape()
			}
		}
		lexer.step()
	}
}

// consumeString scans a quoted string token. An unescaped newline or EOF
// before the closing quote yields TBadString with an error.
func (lexer *lexer) consumeString() T {
	quote := lexer.codePoint
	lexer.step()

	for {
		switch lexer.codePoint {
		case '\\':
			lexer.step()

			// Handle Windows CRLF
			if lexer.codePoint == '\r' {
				lexer.step()
				if lexer.codePoint == '\n' {
					lexer.step()
				}
				continue
			}

			// Otherwise, fall through to ignore the character after the backslash

		case eof:
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case '\n', '\r', '\f':
			lexer.log.Add(logger.Error, &lexer.tracker,
				logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}},
				"Unterminated string token")
			return TBadString

		case quote:
			lexer.step()
			return TString
		}
		lexer.step()
	}
}

// consumeNumeric scans a numeric token (sign, digits, fraction, exponent)
// and classifies it as TNumber, TPercentage, or TDimension (with UnitOffset
// recording where the unit begins).
func (lexer *lexer) consumeNumeric() T {
	// Skip over leading sign
	if lexer.codePoint == '+' || lexer.codePoint == '-' {
		lexer.step()
	}

	// Skip over leading digits
	for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
		lexer.step()
	}

	// Skip over digits after dot
	if lexer.codePoint == '.' {
		lexer.step()
		for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
			lexer.step()
		}
	}

	// Skip over exponent
	if lexer.codePoint == 'e' || lexer.codePoint == 'E' {
		contents := lexer.source.Contents

		// Look ahead before advancing to make sure this is an exponent, not a unit
		if lexer.current < len(contents) {
			c := contents[lexer.current]
			if (c == '+' || c == '-') && lexer.current+1 < len(contents) {
				c = contents[lexer.current+1]
			}

			// Only consume this if it's an exponent
			if c >= '0' && c <= '9' {
				lexer.step()
				if lexer.codePoint == '+' || lexer.codePoint == '-' {
					lexer.step()
				}
				for lexer.codePoint >= '0' && lexer.codePoint <= '9' {
					lexer.step()
				}
			}
		}
	}

	// Determine the numeric type
	if lexer.wouldStartIdentifier() {
		lexer.Token.UnitOffset = uint16(lexer.Token.Range.Len)
		lexer.consumeName()
		return TDimension
	}
	if lexer.codePoint == '%' {
		lexer.step()
		return TPercentage
	}
	return TNumber
}

// IsNameStart reports whether c can begin a CSS name: ASCII letter,
// underscore, any non-ASCII code point, or NUL.
func IsNameStart(c rune) bool {
	return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c >= 0x80 || c == '\x00'
}

// IsNameContinue reports whether c can continue a CSS name (name-start
// characters plus digits and "-").
func IsNameContinue(c rune) bool {
	return IsNameStart(c) || (c >= '0' && c <= '9') || c == '-'
}

func isNewline(c rune) bool {
	switch c {
	case '\n', '\r', '\f':
		return true
	}
	return false
}

func isWhitespace(c rune) bool {
	switch c {
	case ' ', '\t', '\n', '\r', '\f':
		return true
	}
	return false
}

// isHex returns the value of c as a hex digit and whether it is one.
func isHex(c rune) (int, bool) {
	if c >= '0' && c <= '9' {
		return int(c - '0'), true
	}
	if c >= 'a' && c <= 'f' {
		return int(c + (10 - 'a')), true
	}
	if c >= 'A' && c <= 'F' {
		return int(c + (10 - 'A')), true
	}
	return 0, false
}

func isNonPrintable(c rune) bool {
	return c <= 0x08 || c == 0x0B || (c >= 0x0E && c <= 0x1F) || c == 0x7F
}

// decodeEscapesInToken resolves backslash escapes (and NUL bytes) in a raw
// token slice. The fast path returns the input unchanged when there is
// nothing to decode.
func decodeEscapesInToken(inner string) string {
	i := 0

	// Scan for the first character that needs decoding
	for i < len(inner) {
		if c := inner[i]; c == '\\' || c == '\x00' {
			break
		}
		i++
	}

	if i == len(inner) {
		return inner
	}

	sb := strings.Builder{}
	sb.WriteString(inner[:i])
	inner = inner[i:]

	for
	len(inner) > 0 {
		c, width := utf8.DecodeRuneInString(inner)
		inner = inner[width:]

		if c != '\\' {
			// NUL is replaced with U+FFFD per the CSS syntax spec
			if c == '\x00' {
				c = utf8.RuneError
			}
			sb.WriteRune(c)
			continue
		}

		if len(inner) == 0 {
			// Trailing lone backslash
			sb.WriteRune(utf8.RuneError)
			continue
		}

		c, width = utf8.DecodeRuneInString(inner)
		inner = inner[width:]
		hex, ok := isHex(c)

		if !ok {
			if c == '\n' || c == '\f' {
				continue
			}

			// Handle Windows CRLF
			if c == '\r' {
				c, width = utf8.DecodeRuneInString(inner)
				if c == '\n' {
					inner = inner[width:]
				}
				continue
			}

			// If we get here, this is not a valid escape. However, this is still
			// allowed. In this case the backslash is just ignored.
			sb.WriteRune(c)
			continue
		}

		// Parse up to five additional hex characters (so six in total)
		for i := 0; i < 5 && len(inner) > 0; i++ {
			c, width = utf8.DecodeRuneInString(inner)
			if next, ok := isHex(c); ok {
				inner = inner[width:]
				hex = hex*16 + next
			} else {
				break
			}
		}

		// One whitespace character after a hex escape is consumed
		if len(inner) > 0 {
			c, width = utf8.DecodeRuneInString(inner)
			if isWhitespace(c) {
				inner = inner[width:]
			}
		}

		if hex == 0 || (hex >= 0xD800 && hex <= 0xDFFF) || hex > 0x10FFFF {
			sb.WriteRune(utf8.RuneError)
			continue
		}

		sb.WriteRune(rune(hex))
	}

	return sb.String()
}

================================================
FILE: lib/esbuild/css_lexer/css_lexer_test.go
================================================

package css_lexer

import (
	"testing"

	"github.com/withastro/compiler/lib/esbuild/logger"
	"github.com/withastro/compiler/lib/esbuild/test"
)

// lexToken tokenizes the contents and returns the kind and decoded text of
// the first token (or TEndOfFile for empty input).
func lexToken(contents string) (T, string) {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
	result := Tokenize(log, test.SourceForTest(contents))
	if len(result.Tokens) > 0 {
		t := result.Tokens[0]
		return t.Kind, t.DecodedText(contents)
	}
	return TEndOfFile, ""
}

// lexerError tokenizes the contents and returns all logged messages joined
// into a single string.
func lexerError(contents string) string {
	log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
	Tokenize(log, test.SourceForTest(contents))
	text := ""
	for _, msg := range log.Done() {
		text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{})
	}
	return text
}

func TestTokens(t *testing.T)
{
	// One representative input per token kind, with its expected display name
	expected := []struct {
		contents string
		text     string
		token    T
	}{
		{"", "end of file", TEndOfFile},
		{"@media", "@-keyword", TAtKeyword},
		{"url(x y", "bad URL token", TBadURL},
		{"-->", "\"-->\"", TCDC},
		{"<!--", "\"<!--\"", TCDO},
		{"}", "\"}\"", TCloseBrace},
		{"]", "\"]\"", TCloseBracket},
		{")", "\")\"", TCloseParen},
		{":", "\":\"", TColon},
		{",", "\",\"", TComma},
		{"?", "delimiter", TDelim},
		{"&", "\"&\"", TDelimAmpersand},
		{"*", "\"*\"", TDelimAsterisk},
		{"|", "\"|\"", TDelimBar},
		{"^", "\"^\"", TDelimCaret},
		{"$", "\"$\"", TDelimDollar},
		{".", "\".\"", TDelimDot},
		{"=", "\"=\"", TDelimEquals},
		{"!", "\"!\"", TDelimExclamation},
		{">", "\">\"", TDelimGreaterThan},
		{"+", "\"+\"", TDelimPlus},
		{"/", "\"/\"", TDelimSlash},
		{"~", "\"~\"", TDelimTilde},
		{"1px", "dimension", TDimension},
		{"max(", "function token", TFunction},
		{"#name", "hash token", THash},
		{"name", "identifier", TIdent},
		{"123", "number", TNumber},
		{"{", "\"{\"", TOpenBrace},
		{"[", "\"[\"", TOpenBracket},
		{"(", "\"(\"", TOpenParen},
		{"50%", "percentage", TPercentage},
		{";", "\";\"", TSemicolon},
		{"'abc'", "string token", TString},
		{"url(test)", "URL token", TURL},
		{" ", "whitespace", TWhitespace},
	}

	for _, it := range expected {
		contents := it.contents
		token := it.token
		t.Run(contents, func(t *testing.T) {
			kind, _ := lexToken(contents)
			test.AssertEqual(t, kind, token)
		})
	}
}

// TestStringParsing checks that string tokens decode escapes correctly.
func TestStringParsing(t *testing.T) {
	contentsOfStringToken := func(contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, TString)
		return text
	}
	test.AssertEqual(t, contentsOfStringToken("\"foo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\oo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\"o\""), "f\"o")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\\o\""), "f\\o")
	// Escaped newlines of every flavor are removed from the string
	test.AssertEqual(t, contentsOfStringToken("\"f\\\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\ro\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\r\no\""), "fo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\\fo\""), "fo")
	// Hex escapes: \6f is "o"; a single trailing space terminates the escape
	test.AssertEqual(t, contentsOfStringToken("\"f\\6fo\""), "foo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f o\""), "foo")
	// NOTE(review): this input appears identical to the previous one but expects
	// a different result — the upstream case likely used two spaces ("\\6f  o")
	// and one was lost in extraction; confirm against esbuild's css_lexer_test.go.
	test.AssertEqual(t, contentsOfStringToken("\"f\\6f o\""), "fo o")
	// Out-of-range hex escapes decode to U+FFFD
	test.AssertEqual(t, contentsOfStringToken("\"f\\fffffffo\""), "f\uFFFDfo")
	test.AssertEqual(t, contentsOfStringToken("\"f\\10abcdeo\""), "f\U0010ABCDeo")
}

// TestURLParsing checks decoding of unquoted url(...) tokens.
func TestURLParsing(t *testing.T) {
	contentsOfURLToken := func(expected T, contents string) string {
		t.Helper()
		kind, text := lexToken(contents)
		test.AssertEqual(t, kind, expected)
		return text
	}
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(foo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url( foo\t\t)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\oo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\\"o)"), "f\"o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\'o)"), "f'o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\)o)"), "f)o")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6fo)"), "foo")
	test.AssertEqual(t, contentsOfURLToken(TURL, "url(f\\6f o)"), "foo")
	// Unescaped whitespace mid-URL produces a bad URL token (raw text returned)
	test.AssertEqual(t, contentsOfURLToken(TBadURL, "url(f\\6f o)"), "url(f\\6f o)")
}

// TestComment checks the diagnostics for unterminated and single-line comments.
func TestComment(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("/*"),
		"<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/*/"),
		"<stdin>: ERROR: Expected \"*/\" to terminate multi-line comment\n<stdin>: NOTE: The multi-line comment starts here:\n")
	test.AssertEqualWithDiff(t, lexerError("/**/"), "")
	test.AssertEqualWithDiff(t, lexerError("//"),
		"<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n")
}

// TestString checks the diagnostics for unterminated string tokens.
func TestString(t *testing.T) {
	test.AssertEqualWithDiff(t, lexerError("'"), "<stdin>: ERROR: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\""), "<stdin>: ERROR: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("'\\'"), "<stdin>: ERROR: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("\"\\\""), "<stdin>: ERROR: Unterminated string token\n")
	test.AssertEqualWithDiff(t, lexerError("''"), "")
	test.AssertEqualWithDiff(t, lexerError("\"\""), "")
}

func TestBOM(t *testing.T) {
	// A byte order mark should not be parsed as an identifier
	kind, _ := lexToken("\uFEFF.")
	test.AssertEqual(t, kind, TDelimDot)
}

================================================
FILE: lib/esbuild/css_parser/css_decls.go
================================================

package css_parser

import (
	"github.com/withastro/compiler/lib/esbuild/compat"
	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
)

// commaToken builds a "," token, followed by a space unless whitespace is
// being minified.
func (p *parser) commaToken() css_ast.Token {
	t := css_ast.Token{
		Kind: css_lexer.TComma,
		Text: ",",
	}
	if !p.options.MinifyWhitespace {
		t.Whitespace = css_ast.WhitespaceAfter
	}
	return t
}

// expandTokenQuad expands a 1-4 token shorthand value into the canonical
// [top, right, bottom, left] quad using the CSS shorthand repetition rules.
// Only numeric tokens (and optionally the single identifier allowedIdent,
// e.g. "auto") are accepted; anything else returns ok == false.
func expandTokenQuad(tokens []css_ast.Token, allowedIdent string) (result [4]css_ast.Token, ok bool) {
	n := len(tokens)
	if n < 1 || n > 4 {
		return
	}

	// Don't do this if we encounter any unexpected tokens such as "var()"
	for i := 0; i < n; i++ {
		if t := tokens[i]; !t.Kind.IsNumeric() && (t.Kind != css_lexer.TIdent || allowedIdent == "" || t.Text != allowedIdent) {
			return
		}
	}

	// Standard shorthand fill rules: missing right copies top, missing bottom
	// copies top, missing left copies right
	result[0] = tokens[0]
	if n > 1 {
		result[1] = tokens[1]
	} else {
		result[1] = result[0]
	}
	if n > 2 {
		result[2] = tokens[2]
	} else {
		result[2] = result[0]
	}
	if n > 3 {
		result[3] = tokens[3]
	} else {
		result[3] = result[1]
	}
	ok = true
	return
}

// compactTokenQuad is the inverse of expandTokenQuad: it collapses a full
// [top, right, bottom, left] quad into the shortest equivalent shorthand and
// assigns the appropriate whitespace flags to each remaining token.
func compactTokenQuad(a css_ast.Token, b css_ast.Token, c css_ast.Token, d css_ast.Token, minifyWhitespace bool) []css_ast.Token {
	tokens := []css_ast.Token{a, b, c, d}
	// Drop trailing tokens that repeat earlier ones per the shorthand rules
	if tokens[3].EqualIgnoringWhitespace(tokens[1]) {
		if tokens[2].EqualIgnoringWhitespace(tokens[0]) {
			if tokens[1].EqualIgnoringWhitespace(tokens[0]) {
				tokens = tokens[:1]
			} else {
				tokens = tokens[:2]
			}
		} else {
			tokens = tokens[:3]
		}
	}

	// Re-derive the whitespace flags for the shortened list
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if !minifyWhitespace || i > 0 {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}

	return tokens
}

// processDeclarations runs per-declaration lowering and (when MinifySyntax is
// enabled) minification passes over a rule list: color mangling, font and
// transform mangling, and merging of margin/padding/inset/border-radius
// longhands into shorthands. Rules removed by merging are left as zero-valued
// entries and compacted out at the end.
func (p *parser) processDeclarations(rules []css_ast.Rule) []css_ast.Rule {
	margin := boxTracker{key: css_ast.DMargin, keyText: "margin", allowAuto: true}
	padding := boxTracker{key: css_ast.DPadding, keyText: "padding", allowAuto: false}
	inset := boxTracker{key: css_ast.DInset, keyText: "inset", allowAuto: true}
	borderRadius := borderRadiusTracker{}

	for i, rule := range rules {
		decl, ok := rule.Data.(*css_ast.RDeclaration)
		if !ok {
			continue
		}

		switch decl.Key {
		case css_ast.DBackgroundColor,
			css_ast.DBorderBlockEndColor,
			css_ast.DBorderBlockStartColor,
			css_ast.DBorderBottomColor,
			css_ast.DBorderColor,
			css_ast.DBorderInlineEndColor,
			css_ast.DBorderInlineStartColor,
			css_ast.DBorderLeftColor,
			css_ast.DBorderRightColor,
			css_ast.DBorderTopColor,
			css_ast.DCaretColor,
			css_ast.DColor,
			css_ast.DColumnRuleColor,
			css_ast.DFill,
			css_ast.DFloodColor,
			css_ast.DLightingColor,
			css_ast.DOutlineColor,
			css_ast.DStopColor,
			css_ast.DStroke,
			css_ast.DTextDecorationColor,
			css_ast.DTextEmphasisColor:
			// Single-valued color properties: lower modern syntax, then mangle
			if len(decl.Value) == 1 {
				decl.Value[0] = p.lowerColor(decl.Value[0])
				if p.options.MinifySyntax {
					t := decl.Value[0]
					if hex, ok := parseColor(t); ok {
						decl.Value[0] = p.mangleColor(t, hex)
					}
				}
			}

		case css_ast.DFont:
			if p.options.MinifySyntax {
				decl.Value = p.mangleFont(decl.Value)
			}

		case css_ast.DFontFamily:
			if p.options.MinifySyntax {
				if value, ok := p.mangleFontFamily(decl.Value); ok {
					decl.Value = value
				}
			}

		case css_ast.DFontWeight:
			if len(decl.Value) == 1 && p.options.MinifySyntax {
				decl.Value[0] =
					p.mangleFontWeight(decl.Value[0])
			}

		case css_ast.DTransform:
			if p.options.MinifySyntax {
				decl.Value = p.mangleTransforms(decl.Value)
			}

		case css_ast.DBoxShadow:
			if p.options.MinifySyntax {
				decl.Value = p.mangleBoxShadows(decl.Value)
			}

		// Margin
		case css_ast.DMargin:
			if p.options.MinifySyntax {
				margin.mangleSides(rules, decl, i, p.options.MinifyWhitespace)
			}
		case css_ast.DMarginTop:
			if p.options.MinifySyntax {
				margin.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxTop)
			}
		case css_ast.DMarginRight:
			if p.options.MinifySyntax {
				margin.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxRight)
			}
		case css_ast.DMarginBottom:
			if p.options.MinifySyntax {
				margin.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxBottom)
			}
		case css_ast.DMarginLeft:
			if p.options.MinifySyntax {
				margin.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxLeft)
			}

		// Padding
		case css_ast.DPadding:
			if p.options.MinifySyntax {
				padding.mangleSides(rules, decl, i, p.options.MinifyWhitespace)
			}
		case css_ast.DPaddingTop:
			if p.options.MinifySyntax {
				padding.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxTop)
			}
		case css_ast.DPaddingRight:
			if p.options.MinifySyntax {
				padding.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxRight)
			}
		case css_ast.DPaddingBottom:
			if p.options.MinifySyntax {
				padding.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxBottom)
			}
		case css_ast.DPaddingLeft:
			if p.options.MinifySyntax {
				padding.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxLeft)
			}

		// Inset (only when the target environment supports the "inset" property)
		case css_ast.DInset:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MinifySyntax {
				inset.mangleSides(rules, decl, i, p.options.MinifyWhitespace)
			}
		case css_ast.DTop:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MinifySyntax {
				inset.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxTop)
			}
		case css_ast.DRight:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MinifySyntax {
				inset.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxRight)
			}
		case css_ast.DBottom:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MinifySyntax {
				inset.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxBottom)
			}
		case css_ast.DLeft:
			if !p.options.UnsupportedCSSFeatures.Has(compat.InsetProperty) && p.options.MinifySyntax {
				inset.mangleSide(rules, decl, i, p.options.MinifyWhitespace, boxLeft)
			}

		// Border radius
		case css_ast.DBorderRadius:
			if p.options.MinifySyntax {
				borderRadius.mangleCorners(rules, decl, i, p.options.MinifyWhitespace)
			}
		case css_ast.DBorderTopLeftRadius:
			if p.options.MinifySyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.MinifyWhitespace, borderRadiusTopLeft)
			}
		case css_ast.DBorderTopRightRadius:
			if p.options.MinifySyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.MinifyWhitespace, borderRadiusTopRight)
			}
		case css_ast.DBorderBottomRightRadius:
			if p.options.MinifySyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.MinifyWhitespace, borderRadiusBottomRight)
			}
		case css_ast.DBorderBottomLeftRadius:
			if p.options.MinifySyntax {
				borderRadius.mangleCorner(rules, decl, i, p.options.MinifyWhitespace, borderRadiusBottomLeft)
			}
		}
	}

	// Compact removed rules
	if p.options.MinifySyntax {
		end := 0
		for _, rule := range rules {
			if rule.Data != nil {
				rules[end] = rule
				end++
			}
		}
		rules = rules[:end]
	}

	return rules
}

================================================
FILE: lib/esbuild/css_parser/css_decls_border_radius.go
================================================

package css_parser

import (
	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
	"github.com/withastro/compiler/lib/esbuild/logger"
)

// Corner indices used by the border-radius tracker, in shorthand order.
const (
	borderRadiusTopLeft = iota
	borderRadiusTopRight
	borderRadiusBottomRight
	borderRadiusBottomLeft
)

// borderRadiusCorner remembers the most recent value seen for one corner.
// The first/second tokens correspond to the horizontal and vertical radii
// (the parts before and after the "/" in the shorthand).
type borderRadiusCorner struct {
	firstToken    css_ast.Token
	secondToken   css_ast.Token
	unitSafety    unitSafetyTracker
	ruleIndex     uint32 // The index of the originating rule in the rules array
	wasSingleRule bool   // True if the originating rule was just for this side
}

// borderRadiusTracker accumulates border-radius longhands/shorthands across a
// rule list so that later rules can absorb earlier ones.
type borderRadiusTracker struct {
	corners   [4]borderRadiusCorner
	important bool // True if all active rules were flagged as "!important"
}

// updateCorner records a new value for a corner, erasing the earlier rule it
// supersedes when that is safe (units known-safe on both sides, and a
// shorthand never erases a preceding longhand-only rule).
func (borderRadius *borderRadiusTracker) updateCorner(rules []css_ast.Rule, corner int, new borderRadiusCorner) {
	if old := borderRadius.corners[corner]; old.firstToken.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	borderRadius.corners[corner] = new
}

// mangleCorners processes a "border-radius" shorthand declaration: it splits
// the value on the optional "/", expands both halves to corner quads, records
// each corner, and then tries to emit a compacted shorthand. Any unexpected
// pattern resets the tracker instead.
func (borderRadius *borderRadiusTracker) mangleCorners(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, minifyWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if borderRadius.important != decl.Important {
		borderRadius.corners = [4]borderRadiusCorner{}
		borderRadius.important = decl.Important
	}

	tokens := decl.Value
	beforeSplit := len(tokens)
	afterSplit := len(tokens)

	// Search for the single slash if present
	for i, t := range tokens {
		if t.Kind == css_lexer.TDelimSlash {
			if beforeSplit == len(tokens) {
				beforeSplit = i
				afterSplit = i + 1
			} else {
				// Multiple slashes are an error
				borderRadius.corners = [4]borderRadiusCorner{}
				return
			}
		}
	}

	// Use a single tracker for the whole rule
	unitSafety := unitSafetyTracker{}
	for _, t := range tokens[:beforeSplit] {
		unitSafety.includeUnitOf(t)
	}
	for _, t := range tokens[afterSplit:] {
		unitSafety.includeUnitOf(t)
	}

	firstRadii, firstRadiiOk := expandTokenQuad(tokens[:beforeSplit], "")
	lastRadii, lastRadiiOk := expandTokenQuad(tokens[afterSplit:], "")

	// Stop now if the pattern wasn't matched
	if !firstRadiiOk || (beforeSplit < afterSplit && !lastRadiiOk) {
		borderRadius.corners = [4]borderRadiusCorner{}
		return
	}

	// Handle the first radii
	for corner, t := range firstRadii {
		if unitSafety.status == unitSafe {
			t.TurnLengthIntoNumberIfZero()
		}
borderRadius.updateCorner(rules, corner, borderRadiusCorner{ firstToken: t, secondToken: t, unitSafety: unitSafety, ruleIndex: uint32(index), }) } // Handle the last radii if lastRadiiOk { for corner, t := range lastRadii { if unitSafety.status == unitSafe { t.TurnLengthIntoNumberIfZero() } borderRadius.corners[corner].secondToken = t } } // Success borderRadius.compactRules(rules, decl.KeyRange, minifyWhitespace) } func (borderRadius *borderRadiusTracker) mangleCorner(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, minifyWhitespace bool, corner int) { // Reset if we see a change in the "!important" flag if borderRadius.important != decl.Important { borderRadius.corners = [4]borderRadiusCorner{} borderRadius.important = decl.Important } if tokens := decl.Value; (len(tokens) == 1 && tokens[0].Kind.IsNumeric()) || (len(tokens) == 2 && tokens[0].Kind.IsNumeric() && tokens[1].Kind.IsNumeric()) { firstToken := tokens[0] secondToken := firstToken if len(tokens) == 2 { secondToken = tokens[1] } // Check to see if these units are safe to use in every browser unitSafety := unitSafetyTracker{} unitSafety.includeUnitOf(firstToken) unitSafety.includeUnitOf(secondToken) // Only collapse "0unit" into "0" if the unit is safe if unitSafety.status == unitSafe && firstToken.TurnLengthIntoNumberIfZero() { tokens[0] = firstToken } if len(tokens) == 2 { if unitSafety.status == unitSafe && secondToken.TurnLengthIntoNumberIfZero() { tokens[1] = secondToken } // If both tokens are equal, merge them into one if firstToken.EqualIgnoringWhitespace(secondToken) { tokens[0].Whitespace &= ^css_ast.WhitespaceAfter decl.Value = tokens[:1] } } borderRadius.updateCorner(rules, corner, borderRadiusCorner{ firstToken: firstToken, secondToken: secondToken, unitSafety: unitSafety, ruleIndex: uint32(index), wasSingleRule: true, }) borderRadius.compactRules(rules, decl.KeyRange, minifyWhitespace) } else { borderRadius.corners = [4]borderRadiusCorner{} } } func (borderRadius 
*borderRadiusTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, minifyWhitespace bool) { // All tokens must be present if eof := css_lexer.TEndOfFile; borderRadius.corners[0].firstToken.Kind == eof || borderRadius.corners[1].firstToken.Kind == eof || borderRadius.corners[2].firstToken.Kind == eof || borderRadius.corners[3].firstToken.Kind == eof { return } // All tokens must have the same unit for _, side := range borderRadius.corners[1:] { if !side.unitSafety.isSafeWith(borderRadius.corners[0].unitSafety) { return } } // Generate the most minimal representation tokens := compactTokenQuad( borderRadius.corners[0].firstToken, borderRadius.corners[1].firstToken, borderRadius.corners[2].firstToken, borderRadius.corners[3].firstToken, minifyWhitespace, ) secondTokens := compactTokenQuad( borderRadius.corners[0].secondToken, borderRadius.corners[1].secondToken, borderRadius.corners[2].secondToken, borderRadius.corners[3].secondToken, minifyWhitespace, ) if !css_ast.TokensEqualIgnoringWhitespace(tokens, secondTokens) { var whitespace css_ast.WhitespaceFlags if !minifyWhitespace { whitespace = css_ast.WhitespaceBefore | css_ast.WhitespaceAfter } tokens = append(tokens, css_ast.Token{ Kind: css_lexer.TDelimSlash, Text: "/", Whitespace: whitespace, }) tokens = append(tokens, secondTokens...) 
} // Remove all of the existing declarations rules[borderRadius.corners[0].ruleIndex] = css_ast.Rule{} rules[borderRadius.corners[1].ruleIndex] = css_ast.Rule{} rules[borderRadius.corners[2].ruleIndex] = css_ast.Rule{} rules[borderRadius.corners[3].ruleIndex] = css_ast.Rule{} // Insert the combined declaration where the last rule was rules[borderRadius.corners[3].ruleIndex].Data = &css_ast.RDeclaration{ Key: css_ast.DBorderRadius, KeyText: "border-radius", Value: tokens, KeyRange: keyRange, Important: borderRadius.important, } } ================================================ FILE: lib/esbuild/css_parser/css_decls_box.go ================================================ package css_parser import ( "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" "github.com/withastro/compiler/lib/esbuild/logger" ) const ( boxTop = iota boxRight boxBottom boxLeft ) type boxSide struct { token css_ast.Token unitSafety unitSafetyTracker ruleIndex uint32 // The index of the originating rule in the rules array wasSingleRule bool // True if the originating rule was just for this side } type boxTracker struct { keyText string sides [4]boxSide allowAuto bool // If true, allow the "auto" keyword important bool // True if all active rules were flagged as "!important" key css_ast.D } type unitSafetyStatus uint8 const ( unitSafe unitSafetyStatus = iota // "margin: 0 1px 2cm 3%;" unitUnsafeSingle // "margin: 0 1vw 2vw 3vw;" unitUnsafeMixed // "margin: 0 1vw 2vh 3ch;" ) // We can only compact rules together if they have the same unit safety level. // We want to avoid a situation where the browser treats some of the original // rules as valid and others as invalid. 
//
// Safe:
//   top: 1px; left: 0; bottom: 1px; right: 0;
//   top: 1Q; left: 2Q; bottom: 3Q; right: 4Q;
//
// Unsafe:
//   top: 1vh; left: 2vw; bottom: 3vh; right: 4vw;
//   top: 1Q; left: 2Q; bottom: 3Q; right: 0;
//   inset: 1Q 0 0 0; top: 0;
//
type unitSafetyTracker struct {
	unit   string // The single non-universally-safe unit seen so far (only meaningful for unitUnsafeSingle)
	status unitSafetyStatus
}

// isSafeWith reports whether values tracked by "a" and "b" may be merged
// into one shorthand without changing which rules a browser considers valid.
func (a unitSafetyTracker) isSafeWith(b unitSafetyTracker) bool {
	return a.status == b.status && a.status != unitUnsafeMixed && (a.status != unitUnsafeSingle || a.unit == b.unit)
}

// includeUnitOf folds one token's unit into the tracker, downgrading the
// safety status as needed. "0", percentages, and universally-supported
// length units stay safe; one repeated other unit is "single"; anything
// else is "mixed".
func (t *unitSafetyTracker) includeUnitOf(token css_ast.Token) {
	switch token.Kind {
	case css_lexer.TNumber:
		if token.Text == "0" {
			return
		}

	case css_lexer.TPercentage:
		return

	case css_lexer.TDimension:
		if token.DimensionUnitIsSafeLength() {
			return
		} else if unit := token.DimensionUnit(); t.status == unitSafe {
			t.status = unitUnsafeSingle
			t.unit = unit
			return
		} else if t.status == unitUnsafeSingle && t.unit == unit {
			return
		}
	}

	t.status = unitUnsafeMixed
}

// updateSide records a new value for one side. If the previous value for
// that side is now fully superseded (and both old and new values use
// browser-safe units), the rule that produced the previous value is cleared.
func (box *boxTracker) updateSide(rules []css_ast.Rule, side int, new boxSide) {
	if old := box.sides[side]; old.token.Kind != css_lexer.TEndOfFile &&
		(!new.wasSingleRule || old.wasSingleRule) &&
		old.unitSafety.status == unitSafe && new.unitSafety.status == unitSafe {
		rules[old.ruleIndex] = css_ast.Rule{}
	}
	box.sides[side] = new
}

// mangleSides processes a shorthand declaration (1-4 values), expanding it
// into the four tracked sides and attempting to compact the rules.
func (box *boxTracker) mangleSides(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, minifyWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	allowedIdent := ""
	if box.allowAuto {
		allowedIdent = "auto"
	}
	if quad, ok := expandTokenQuad(decl.Value, allowedIdent); ok {
		// Use a single tracker for the whole rule
		unitSafety := unitSafetyTracker{}
		for _, t := range quad {
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}
		}
		for side, t := range quad {
			if unitSafety.status == unitSafe {
				t.TurnLengthIntoNumberIfZero()
			}
			box.updateSide(rules, side, boxSide{
				token:      t,
				ruleIndex:  uint32(index),
				unitSafety: unitSafety,
			})
		}
		box.compactRules(rules, decl.KeyRange, minifyWhitespace)
	} else {
		box.sides = [4]boxSide{}
	}
}

// mangleSide processes a single longhand declaration (one numeric value, or
// "auto" when allowed) for the given side.
func (box *boxTracker) mangleSide(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, minifyWhitespace bool, side int) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	if tokens := decl.Value; len(tokens) == 1 {
		if t := tokens[0]; t.Kind.IsNumeric() || (t.Kind == css_lexer.TIdent && box.allowAuto && t.Text == "auto") {
			unitSafety := unitSafetyTracker{}
			if !box.allowAuto || t.Kind.IsNumeric() {
				unitSafety.includeUnitOf(t)
			}

			// Only collapse "0unit" into "0" if the unit is safe
			if unitSafety.status == unitSafe && t.TurnLengthIntoNumberIfZero() {
				tokens[0] = t
			}
			box.updateSide(rules, side, boxSide{
				token:         t,
				ruleIndex:     uint32(index),
				wasSingleRule: true,
				unitSafety:    unitSafety,
			})
			box.compactRules(rules, decl.KeyRange, minifyWhitespace)
			return
		}
	}

	box.sides = [4]boxSide{}
}

// compactRules replaces the four tracked side rules with one combined
// shorthand declaration (box.key/box.keyText), written where the last rule
// was. It is a no-op unless all four sides are present with compatible units.
func (box *boxTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, minifyWhitespace bool) {
	// All tokens must be present
	if eof := css_lexer.TEndOfFile; box.sides[0].token.Kind == eof || box.sides[1].token.Kind == eof ||
		box.sides[2].token.Kind == eof || box.sides[3].token.Kind == eof {
		return
	}

	// All tokens must have the same unit
	for _, side := range box.sides[1:] {
		if !side.unitSafety.isSafeWith(box.sides[0].unitSafety) {
			return
		}
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		box.sides[0].token,
		box.sides[1].token,
		box.sides[2].token,
		box.sides[3].token,
		minifyWhitespace,
	)

	// Remove all of the existing declarations
	rules[box.sides[0].ruleIndex] = css_ast.Rule{}
	rules[box.sides[1].ruleIndex] = css_ast.Rule{}
	rules[box.sides[2].ruleIndex] = css_ast.Rule{}
	rules[box.sides[3].ruleIndex] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	rules[box.sides[3].ruleIndex].Data = &css_ast.RDeclaration{
		Key:       box.key,
		KeyText:   box.keyText,
		Value:     tokens,
		KeyRange:  keyRange,
		Important: box.important,
	}
}


================================================
FILE: lib/esbuild/css_parser/css_decls_box_shadow.go
================================================
package css_parser

import (
	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
)

// mangleBoxShadow minifies the tokens of a single box shadow (one
// comma-separated segment of a "box-shadow" value): collapses "0px" to "0",
// mangles the color, and trims trailing zero lengths when the overall
// pattern looks like a valid shadow.
func (p *parser) mangleBoxShadow(tokens []css_ast.Token) []css_ast.Token {
	insetCount := 0
	colorCount := 0
	numbersBegin := 0
	numbersCount := 0
	numbersDone := false
	foundUnexpectedToken := false

	for i, t := range tokens {
		if t.Kind == css_lexer.TNumber || t.Kind == css_lexer.TDimension {
			if numbersDone {
				// Track if we found a non-number in between two numbers
				foundUnexpectedToken = true
			}
			if t.TurnLengthIntoNumberIfZero() {
				// "0px" => "0"
				tokens[i] = t
			}
			if numbersCount == 0 {
				// Track the index of the first number
				numbersBegin = i
			}
			numbersCount++
		} else {
			if numbersCount != 0 {
				// Track when we find a non-number after a number
				numbersDone = true
			}
			if hex, ok := parseColor(t); ok {
				colorCount++
				tokens[i] = p.mangleColor(t, hex)
			} else if t.Kind == css_lexer.TIdent && t.Text == "inset" {
				insetCount++
			} else {
				// Track if we found a token other than a number, a color, or "inset"
				foundUnexpectedToken = true
			}
		}
	}

	// If everything looks like a valid rule, trim trailing zeros off the numbers.
	// There are three valid configurations of numbers:
	//
	//   offset-x | offset-y
	//   offset-x | offset-y | blur-radius
	//   offset-x | offset-y | blur-radius | spread-radius
	//
	// If omitted, blur-radius and spread-radius are implied to be zero.
	if insetCount <= 1 && colorCount <= 1 && numbersCount > 2 && numbersCount <= 4 && !foundUnexpectedToken {
		numbersEnd := numbersBegin + numbersCount
		for numbersCount > 2 && tokens[numbersBegin+numbersCount-1].IsZero() {
			numbersCount--
		}
		tokens = append(tokens[:numbersBegin+numbersCount], tokens[numbersEnd:]...)
	}

	// Set the whitespace flags: space between adjacent tokens, and a leading
	// space before the first token unless whitespace is being minified.
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if i > 0 || !p.options.MinifyWhitespace {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}

// mangleBoxShadows minifies a full "box-shadow" value by splitting it on
// commas, mangling each shadow independently, and compacting the token
// slice in place.
func (p *parser) mangleBoxShadows(tokens []css_ast.Token) []css_ast.Token {
	n := len(tokens)
	end := 0
	i := 0

	for i < n {
		// Find the comma or the end of the token list
		comma := i
		for comma < n && tokens[comma].Kind != css_lexer.TComma {
			comma++
		}

		// Mangle this individual shadow
		end += copy(tokens[end:], p.mangleBoxShadow(tokens[i:comma]))

		// Skip over the comma
		if comma < n {
			tokens[end] = tokens[comma]
			end++
			comma++
		}
		i = comma
	}
	return tokens[:end]
}


================================================
FILE: lib/esbuild/css_parser/css_decls_color.go
================================================
// Color parsing, lowering (for older browsers), and minification.
// NOTE(review): appears vendored from esbuild's css_parser; keep diffs
// against upstream minimal.
package css_parser

import (
	"fmt"
	"math"
	"strconv"
	"strings"

	"github.com/withastro/compiler/lib/esbuild/compat"
	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
)

// These names are shorter than their hex codes
// (keys are RGBA values with a fully-opaque 0xff alpha byte).
var shortColorName = map[uint32]string{
	0x000080ff: "navy",
	0x008000ff: "green",
	0x008080ff: "teal",
	0x4b0082ff: "indigo",
	0x800000ff: "maroon",
	0x800080ff: "purple",
	0x808000ff: "olive",
	0x808080ff: "gray",
	0xa0522dff: "sienna",
	0xa52a2aff: "brown",
	0xc0c0c0ff: "silver",
	0xcd853fff: "peru",
	0xd2b48cff: "tan",
	0xda70d6ff: "orchid",
	0xdda0ddff: "plum",
	0xee82eeff: "violet",
	0xf0e68cff: "khaki",
	0xf0ffffff: "azure",
	0xf5deb3ff: "wheat",
	0xf5f5dcff: "beige",
	0xfa8072ff: "salmon",
	0xfaf0e6ff: "linen",
	0xff0000ff: "red",
	0xff6347ff: "tomato",
	0xff7f50ff: "coral",
	0xffa500ff: "orange",
	0xffc0cbff: "pink",
	0xffd700ff: "gold",
	0xffe4c4ff: "bisque",
	0xfffafaff: "snow",
	0xfffff0ff: "ivory",
}

// colorNameToHex maps every CSS named color (lowercase) to its RGBA value
// with a fully-opaque 0xff alpha byte.
var colorNameToHex = map[string]uint32{
	"black":                0x000000ff,
	"silver":               0xc0c0c0ff,
	"gray":                 0x808080ff,
	"white":                0xffffffff,
	"maroon":               0x800000ff,
	"red":                  0xff0000ff,
	"purple":               0x800080ff,
	"fuchsia":              0xff00ffff,
	"green":                0x008000ff,
	"lime":                 0x00ff00ff,
	"olive":                0x808000ff,
	"yellow":               0xffff00ff,
	"navy":                 0x000080ff,
	"blue":                 0x0000ffff,
	"teal":                 0x008080ff,
	"aqua":                 0x00ffffff,
	"orange":               0xffa500ff,
	"aliceblue":            0xf0f8ffff,
	"antiquewhite":         0xfaebd7ff,
	"aquamarine":           0x7fffd4ff,
	"azure":                0xf0ffffff,
	"beige":                0xf5f5dcff,
	"bisque":               0xffe4c4ff,
	"blanchedalmond":       0xffebcdff,
	"blueviolet":           0x8a2be2ff,
	"brown":                0xa52a2aff,
	"burlywood":            0xdeb887ff,
	"cadetblue":            0x5f9ea0ff,
	"chartreuse":           0x7fff00ff,
	"chocolate":            0xd2691eff,
	"coral":                0xff7f50ff,
	"cornflowerblue":       0x6495edff,
	"cornsilk":             0xfff8dcff,
	"crimson":              0xdc143cff,
	"cyan":                 0x00ffffff,
	"darkblue":             0x00008bff,
	"darkcyan":             0x008b8bff,
	"darkgoldenrod":        0xb8860bff,
	"darkgray":             0xa9a9a9ff,
	"darkgreen":            0x006400ff,
	"darkgrey":             0xa9a9a9ff,
	"darkkhaki":            0xbdb76bff,
	"darkmagenta":          0x8b008bff,
	"darkolivegreen":       0x556b2fff,
	"darkorange":           0xff8c00ff,
	"darkorchid":           0x9932ccff,
	"darkred":              0x8b0000ff,
	"darksalmon":           0xe9967aff,
	"darkseagreen":         0x8fbc8fff,
	"darkslateblue":        0x483d8bff,
	"darkslategray":        0x2f4f4fff,
	"darkslategrey":        0x2f4f4fff,
	"darkturquoise":        0x00ced1ff,
	"darkviolet":           0x9400d3ff,
	"deeppink":             0xff1493ff,
	"deepskyblue":          0x00bfffff,
	"dimgray":              0x696969ff,
	"dimgrey":              0x696969ff,
	"dodgerblue":           0x1e90ffff,
	"firebrick":            0xb22222ff,
	"floralwhite":          0xfffaf0ff,
	"forestgreen":          0x228b22ff,
	"gainsboro":            0xdcdcdcff,
	"ghostwhite":           0xf8f8ffff,
	"gold":                 0xffd700ff,
	"goldenrod":            0xdaa520ff,
	"greenyellow":          0xadff2fff,
	"grey":                 0x808080ff,
	"honeydew":             0xf0fff0ff,
	"hotpink":              0xff69b4ff,
	"indianred":            0xcd5c5cff,
	"indigo":               0x4b0082ff,
	"ivory":                0xfffff0ff,
	"khaki":                0xf0e68cff,
	"lavender":             0xe6e6faff,
	"lavenderblush":        0xfff0f5ff,
	"lawngreen":            0x7cfc00ff,
	"lemonchiffon":         0xfffacdff,
	"lightblue":            0xadd8e6ff,
	"lightcoral":           0xf08080ff,
	"lightcyan":            0xe0ffffff,
	"lightgoldenrodyellow": 0xfafad2ff,
	"lightgray":            0xd3d3d3ff,
	"lightgreen":           0x90ee90ff,
	"lightgrey":            0xd3d3d3ff,
	"lightpink":            0xffb6c1ff,
	"lightsalmon":          0xffa07aff,
	"lightseagreen":        0x20b2aaff,
	"lightskyblue":         0x87cefaff,
	"lightslategray":       0x778899ff,
	"lightslategrey":       0x778899ff,
	"lightsteelblue":       0xb0c4deff,
	"lightyellow":          0xffffe0ff,
	"limegreen":            0x32cd32ff,
	"linen":                0xfaf0e6ff,
	"magenta":              0xff00ffff,
	"mediumaquamarine":     0x66cdaaff,
	"mediumblue":           0x0000cdff,
	"mediumorchid":         0xba55d3ff,
	"mediumpurple":         0x9370dbff,
	"mediumseagreen":       0x3cb371ff,
	"mediumslateblue":      0x7b68eeff,
	"mediumspringgreen":    0x00fa9aff,
	"mediumturquoise":      0x48d1ccff,
	"mediumvioletred":      0xc71585ff,
	"midnightblue":         0x191970ff,
	"mintcream":            0xf5fffaff,
	"mistyrose":            0xffe4e1ff,
	"moccasin":             0xffe4b5ff,
	"navajowhite":          0xffdeadff,
	"oldlace":              0xfdf5e6ff,
	"olivedrab":            0x6b8e23ff,
	"orangered":            0xff4500ff,
	"orchid":               0xda70d6ff,
	"palegoldenrod":        0xeee8aaff,
	"palegreen":            0x98fb98ff,
	"paleturquoise":        0xafeeeeff,
	"palevioletred":        0xdb7093ff,
	"papayawhip":           0xffefd5ff,
	"peachpuff":            0xffdab9ff,
	"peru":                 0xcd853fff,
	"pink":                 0xffc0cbff,
	"plum":                 0xdda0ddff,
	"powderblue":           0xb0e0e6ff,
	"rosybrown":            0xbc8f8fff,
	"royalblue":            0x4169e1ff,
	"saddlebrown":          0x8b4513ff,
	"salmon":               0xfa8072ff,
	"sandybrown":           0xf4a460ff,
	"seagreen":             0x2e8b57ff,
	"seashell":             0xfff5eeff,
	"sienna":               0xa0522dff,
	"skyblue":              0x87ceebff,
	"slateblue":            0x6a5acdff,
	"slategray":            0x708090ff,
	"slategrey":            0x708090ff,
	"snow":                 0xfffafaff,
	"springgreen":          0x00ff7fff,
	"steelblue":            0x4682b4ff,
	"tan":                  0xd2b48cff,
	"thistle":              0xd8bfd8ff,
	"tomato":               0xff6347ff,
	"turquoise":            0x40e0d0ff,
	"violet":               0xee82eeff,
	"wheat":                0xf5deb3ff,
	"whitesmoke":           0xf5f5f5ff,
	"yellowgreen":          0x9acd32ff,
	"rebeccapurple":        0x663399ff,
}

// parseHex parses a case-insensitive hexadecimal string into an integer.
// Returns false if any character is not a hex digit. Note that it does not
// guard against overflow for strings longer than 8 digits.
func parseHex(text string) (uint32, bool) {
	hex := uint32(0)
	for _, c := range text {
		hex <<= 4
		switch {
		case c >= '0' && c <= '9':
			hex |= uint32(c) - '0'
		case c >= 'a' && c <= 'f':
			hex |= uint32(c) - ('a' - 10)
		case c >= 'A' && c <= 'F':
			hex |= uint32(c) - ('A' - 10)
		default:
			return 0, false
		}
	}
	return hex, true
}

// 0xAABBCCDD => 0xABCD
func compactHex(v uint32) uint32 {
	return ((v &
		0x0FF00000) >> 12) | ((v & 0x00000FF0) >> 4)
}

// 0xABCD => 0xAABBCCDD
func expandHex(v uint32) uint32 {
	return ((v & 0xF000) << 16) | ((v & 0xFF00) << 12) | ((v & 0x0FF0) << 8) | ((v & 0x00FF) << 4) | (v & 0x000F)
}

// Channel accessors for a 32-bit 0xRRGGBBAA color value.
func hexR(v uint32) int { return int(v >> 24) }
func hexG(v uint32) int { return int((v >> 16) & 255) }
func hexB(v uint32) int { return int((v >> 8) & 255) }
func hexA(v uint32) int { return int(v & 255) }

// floatToStringForColor formats a number with at most three decimal places,
// trimming trailing zeros and a trailing decimal point (e.g. 0.500 => "0.5").
func floatToStringForColor(a float64) string {
	text := fmt.Sprintf("%.03f", a)
	for text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}
	if text[len(text)-1] == '.' {
		text = text[:len(text)-1]
	}
	return text
}

// degreesForAngle converts a CSS angle token (plain number, "deg", "grad",
// "rad", or "turn") into degrees. Returns false for anything else.
func degreesForAngle(token css_ast.Token) (float64, bool) {
	switch token.Kind {
	case css_lexer.TNumber:
		if value, err := strconv.ParseFloat(token.Text, 64); err == nil {
			return value, true
		}

	case css_lexer.TDimension:
		if value, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil {
			switch token.DimensionUnit() {
			case "deg":
				return value, true
			case "grad":
				return value * (360.0 / 400.0), true
			case "rad":
				return value * (180.0 / math.Pi), true
			case "turn":
				return value * 360.0, true
			}
		}
	}
	return 0, false
}

// lowerAlphaPercentageToNumber rewrites a percentage alpha token into the
// equivalent number token (e.g. "4%" => "0.04"). Other tokens pass through.
func lowerAlphaPercentageToNumber(token css_ast.Token) css_ast.Token {
	if token.Kind == css_lexer.TPercentage {
		if value, err := strconv.ParseFloat(token.Text[:len(token.Text)-1], 64); err == nil {
			token.Kind = css_lexer.TNumber
			token.Text = floatToStringForColor(value / 100.0)
		}
	}
	return token
}

// Convert newer color syntax to older color syntax for older browsers
func (p *parser) lowerColor(token css_ast.Token) css_ast.Token {
	text := token.Text

	switch token.Kind {
	case css_lexer.THash:
		if p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
			switch len(text) {
			case 4:
				// "#1234" => "rgba(1, 2, 3, 0.004)"
				if hex, ok := parseHex(text); ok {
					hex = expandHex(hex)
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}

			case 8:
				// "#12345678" => "rgba(18, 52, 86, 0.47)"
				if hex, ok := parseHex(text); ok {
					token.Kind = css_lexer.TFunction
					token.Text = "rgba"
					commaToken := p.commaToken()
					token.Children = &[]css_ast.Token{
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
						{Kind: css_lexer.TNumber, Text: floatToStringForColor(float64(hexA(hex)) / 255)},
					}
				}
			}
		}

	case css_lexer.TIdent:
		if text == "rebeccapurple" && p.options.UnsupportedCSSFeatures.Has(compat.RebeccaPurple) {
			token.Kind = css_lexer.THash
			token.Text = "663399"
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba", "hsl", "hsla":
			if p.options.UnsupportedCSSFeatures.Has(compat.Modern_RGB_HSL) {
				args := *token.Children
				removeAlpha := false
				addAlpha := false

				// "hsl(1deg, 2%, 3%)" => "hsl(1, 2%, 3%)"
				if (text == "hsl" || text == "hsla") && len(args) > 0 {
					if degrees, ok := degreesForAngle(args[0]); ok {
						args[0].Kind = css_lexer.TNumber
						args[0].Text = floatToStringForColor(degrees)
					}
				}

				// These check for "IsNumeric" to reject "var()" since a single "var()"
				// can substitute for multiple tokens and that messes up pattern matching
				switch len(args) {
				case 3:
					// "rgba(1 2 3)" => "rgb(1, 2, 3)"
					// "hsla(1 2% 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() {
						removeAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2],
						}
					}

				case 5:
					// "rgba(1, 2, 3)" => "rgb(1, 2, 3)"
					// "hsla(1, 2%, 3%)" => "hsl(1, 2%, 3%)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() {
						removeAlpha = true
						break
					}

					// "rgb(1 2 3 / 4%)" => "rgba(1, 2, 3, 0.04)"
					// "hsl(1 2% 3% / 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind.IsNumeric() && args[2].Kind.IsNumeric() &&
						args[3].Kind == css_lexer.TDelimSlash && args[4].Kind.IsNumeric() {
						addAlpha = true
						args[0].Whitespace = 0
						args[1].Whitespace = 0
						args[2].Whitespace = 0
						commaToken := p.commaToken()
						token.Children = &[]css_ast.Token{
							args[0], commaToken,
							args[1], commaToken,
							args[2], commaToken,
							lowerAlphaPercentageToNumber(args[4]),
						}
					}

				case 7:
					// "rgb(1%, 2%, 3%, 4%)" => "rgba(1%, 2%, 3%, 0.04)"
					// "hsl(1, 2%, 3%, 4%)" => "hsla(1, 2%, 3%, 0.04)"
					if args[0].Kind.IsNumeric() && args[1].Kind == css_lexer.TComma &&
						args[2].Kind.IsNumeric() && args[3].Kind == css_lexer.TComma &&
						args[4].Kind.IsNumeric() && args[5].Kind == css_lexer.TComma &&
						args[6].Kind.IsNumeric() {
						addAlpha = true
						args[6] = lowerAlphaPercentageToNumber(args[6])
					}
				}

				if removeAlpha {
					if text == "rgba" {
						token.Text = "rgb"
					} else if text == "hsla" {
						token.Text = "hsl"
					}
				} else if addAlpha {
					if text == "rgb" {
						token.Text = "rgba"
					} else if text == "hsl" {
						token.Text = "hsla"
					}
				}
			}
		}
	}

	return token
}

// parseColor attempts to evaluate a color token (named color, hex hash, or
// an rgb()/rgba()/hsl()/hsla() call with constant arguments) into a 32-bit
// 0xRRGGBBAA value. Returns false when the token is not a constant color.
func parseColor(token css_ast.Token) (uint32, bool) {
	text := token.Text

	switch token.Kind {
	case css_lexer.TIdent:
		if hex, ok := colorNameToHex[strings.ToLower(text)]; ok {
			return hex, true
		}

	case css_lexer.THash:
		switch len(text) {
		case 3:
			// "#123"
			if hex, ok := parseHex(text); ok {
				return (expandHex(hex) << 8) | 0xFF, true
			}

		case 4:
			// "#1234"
			if hex, ok := parseHex(text); ok {
				return expandHex(hex), true
			}

		case 6:
			// "#112233"
			if hex, ok := parseHex(text); ok {
				return (hex << 8) | 0xFF, true
			}

		case 8:
			// "#11223344"
			if hex, ok := parseHex(text); ok {
				return hex, true
			}
		}

	case css_lexer.TFunction:
		switch text {
		case "rgb", "rgba":
			args := *token.Children
			var r, g, b, a css_ast.Token
			switch len(args) {
			case 3:
				// "rgb(1 2 3)"
				r, g, b = args[0], args[1], args[2]

			case 5:
				// "rgba(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					r, g, b = args[0], args[2], args[4]
					break
				}

				// "rgb(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					r, g, b, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "rgb(1%, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					r, g, b, a = args[0], args[2], args[4], args[6]
				}
			}

			// An unset alpha token is handled by parseAlphaByte (=> fully opaque)
			if r, ok := parseColorByte(r, 1); ok {
				if g, ok := parseColorByte(g, 1); ok {
					if b, ok := parseColorByte(b, 1); ok {
						if a, ok := parseAlphaByte(a); ok {
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}

		case "hsl", "hsla":
			args := *token.Children
			var h, s, l, a css_ast.Token

			switch len(args) {
			case 3:
				// "hsl(1 2 3)"
				h, s, l = args[0], args[1], args[2]

			case 5:
				// "hsla(1, 2, 3)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma {
					h, s, l = args[0], args[2], args[4]
					break
				}

				// "hsl(1 2 3 / 4%)"
				if args[3].Kind == css_lexer.TDelimSlash {
					h, s, l, a = args[0], args[1], args[2], args[4]
				}

			case 7:
				// "hsl(1%, 2%, 3%, 4%)"
				if args[1].Kind == css_lexer.TComma && args[3].Kind == css_lexer.TComma && args[5].Kind == css_lexer.TComma {
					h, s, l, a = args[0], args[2], args[4], args[6]
				}
			}

			// Convert from HSL to RGB. The algorithm is from the section
			// "Converting HSL colors to sRGB colors" in the specification.
			if h, ok := degreesForAngle(h); ok {
				if s, ok := s.FractionForPercentage(); ok {
					if l, ok := l.FractionForPercentage(); ok {
						if a, ok := parseAlphaByte(a); ok {
							h /= 360.0
							var t2 float64
							if l <= 0.5 {
								t2 = l * (s + 1)
							} else {
								t2 = l + s - (l * s)
							}
							t1 := l*2 - t2
							r := hueToRgb(t1, t2, h+1.0/3.0)
							g := hueToRgb(t1, t2, h)
							b := hueToRgb(t1, t2, h-1.0/3.0)
							return uint32((r << 24) | (g << 16) | (b << 8) | a), true
						}
					}
				}
			}
		}
	}

	return 0, false
}

// hueToRgb computes one sRGB channel (0-255) from HSL intermediates, per the
// spec algorithm referenced above. "hue" is a fraction of a full turn.
func hueToRgb(t1 float64, t2 float64, hue float64) uint32 {
	hue -= math.Floor(hue)
	hue *= 6.0
	var f float64
	if hue < 1 {
		f = (t2-t1)*hue + t1
	} else if hue < 3 {
		f = t2
	} else if hue < 4 {
		f = (t2-t1)*(4-hue) + t1
	} else {
		f = t1
	}
	i := int(math.Round(f * 255))
	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i)
}

// parseAlphaByte parses an optional alpha token. A zero-valued (unset) token
// means fully opaque.
func parseAlphaByte(token css_ast.Token) (uint32, bool) {
	if token.Kind == css_lexer.T(0) {
		return 255, true
	}
	return parseColorByte(token, 255)
}

// parseColorByte parses a number or percentage token into a clamped 0-255
// channel value. Numbers are multiplied by "scale"; percentages map 100% to
// 255 regardless of scale.
func parseColorByte(token css_ast.Token, scale float64) (uint32, bool) {
	var i int
	var ok bool

	switch token.Kind {
	case css_lexer.TNumber:
		if f, err := strconv.ParseFloat(token.Text, 64); err == nil {
			i = int(math.Round(f * scale))
			ok = true
		}

	case css_lexer.TPercentage:
		if f, err := strconv.ParseFloat(token.PercentageValue(), 64); err == nil {
			i = int(math.Round(f * (255.0 / 100.0)))
			ok = true
		}
	}

	if i < 0 {
		i = 0
	} else if i > 255 {
		i = 255
	}
	return uint32(i), ok
}

// mangleColor rewrites a color token (whose constant value is "hex", as
// 0xRRGGBBAA) into its shortest equivalent form: a named color, a 3/6-digit
// hash for opaque colors, a 4/8-digit hash when the target supports hex
// alpha, or an rgba() call otherwise.
func (p *parser) mangleColor(token css_ast.Token, hex uint32) css_ast.Token {
	// Note: Do NOT remove color information from fully transparent colors.
	// Safari behaves differently than other browsers for color interpolation:
	// https://css-tricks.com/thing-know-gradients-transparent-black/

	if hexA(hex) == 255 {
		token.Children = nil
		if name, ok := shortColorName[hex]; ok {
			token.Kind = css_lexer.TIdent
			token.Text = name
		} else {
			token.Kind = css_lexer.THash
			hex >>= 8
			compact := compactHex(hex)
			if hex == expandHex(compact) {
				token.Text = fmt.Sprintf("%03x", compact)
			} else {
				token.Text = fmt.Sprintf("%06x", hex)
			}
		}
	} else if !p.options.UnsupportedCSSFeatures.Has(compat.HexRGBA) {
		token.Children = nil
		token.Kind = css_lexer.THash
		compact := compactHex(hex)
		if hex == expandHex(compact) {
			token.Text = fmt.Sprintf("%04x", compact)
		} else {
			token.Text = fmt.Sprintf("%08x", hex)
		}
	} else {
		token.Kind = css_lexer.TFunction
		token.Text = "rgba"
		commaToken := p.commaToken()

		// Each entry in the table is exactly four characters wide, so the
		// fraction for this alpha byte starts at index alpha*4; trailing
		// padding spaces are trimmed off below.
		index := hexA(hex) * 4
		alpha := alphaFractionTable[index : index+4]
		if space := strings.IndexByte(alpha, ' '); space != -1 {
			alpha = alpha[:space]
		}
		token.Children = &[]css_ast.Token{
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexR(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexG(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: strconv.Itoa(hexB(hex))}, commaToken,
			{Kind: css_lexer.TNumber, Text: alpha},
		}
	}

	return token
}

// Every four characters in this table is the fraction for that index
// NOTE(review): the padding spaces below are significant (each entry must be
// exactly 4 characters for the alpha*4 indexing above); the extraction that
// produced this chunk collapsed whitespace, so the spacing was reconstructed
// from the 4-character invariant — verify against the original file.
const alphaFractionTable string = "" +
	"0   .004.008.01 .016.02 .024.027.03 .035.04 .043.047.05 .055.06 " +
	".063.067.07 .075.08 .082.086.09 .094.098.1  .106.11 .114.118.12 " +
	".125.13 .133.137.14 .145.15 .153.157.16 .165.17 .173.176.18 .184" +
	".19 .192.196.2  .204.208.21 .216.22 .224.227.23 .235.24 .243.247" +
	".25 .255.26 .263.267.27 .275.28 .282.286.29 .294.298.3  .306.31 " +
	".314.318.32 .325.33 .333.337.34 .345.35 .353.357.36 .365.37 .373" +
	".376.38 .384.39 .392.396.4  .404.408.41 .416.42 .424.427.43 .435" +
	".44 .443.447.45 .455.46 .463.467.47 .475.48 .482.486.49 .494.498" +
	".5  .506.51 .514.518.52 .525.53 .533.537.54 .545.55 .553.557.56 
" + ".565.57 .573.576.58 .584.59 .592.596.6 .604.608.61 .616.62 .624" + ".627.63 .635.64 .643.647.65 .655.66 .663.667.67 .675.68 .682.686" + ".69 .694.698.7 .706.71 .714.718.72 .725.73 .733.737.74 .745.75 " + ".753.757.76 .765.77 .773.776.78 .784.79 .792.796.8 .804.808.81 " + ".816.82 .824.827.83 .835.84 .843.847.85 .855.86 .863.867.87 .875" + ".88 .882.886.89 .894.898.9 .906.91 .914.918.92 .925.93 .933.937" + ".94 .945.95 .953.957.96 .965.97 .973.976.98 .984.99 .992.9961 " ================================================ FILE: lib/esbuild/css_parser/css_decls_font.go ================================================ package css_parser import ( "strconv" "strings" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" ) // Specification: https://drafts.csswg.org/css-fonts/#font-prop // [ <font-style> || <font-variant-css2> || <font-weight> || <font-stretch-css3> ]? <font-size> [ / <line-height> ]? <font-family> func (p *parser) mangleFont(tokens []css_ast.Token) []css_ast.Token { var result []css_ast.Token // Scan up to the font size pos := 0 for ; pos < len(tokens); pos++ { token := tokens[pos] if isFontSize(token) { break } switch token.Kind { case css_lexer.TIdent: switch strings.ToLower(token.Text) { case "normal": // "All subproperties of the font property are first reset to their initial values" // This implies that "normal" doesn't do anything. Also all of the optional values // contain "normal" as an option and they are unordered so it's impossible to say // what property "normal" corresponds to. Just drop these tokens to save space. 
continue // <font-style> case "italic": case "oblique": if pos+1 < len(tokens) && tokens[pos+1].IsAngle() { result = append(result, token, tokens[pos+1]) pos++ continue } // <font-variant-css2> case "small-caps": // <font-weight> case "bold", "bolder", "lighter": result = append(result, p.mangleFontWeight(token)) continue // <font-stretch-css3> case "ultra-condensed", "extra-condensed", "condensed", "semi-condensed", "semi-expanded", "expanded", "extra-expanded", "ultra-expanded": default: // All other tokens are unrecognized, so we bail if we hit one return tokens } result = append(result, token) case css_lexer.TNumber: // "Only values greater than or equal to 1, and less than or equal to // 1000, are valid, and all other values are invalid." if value, err := strconv.ParseFloat(token.Text, 64); err != nil || value < 1 || value > 1000 { return tokens } result = append(result, token) default: // All other tokens are unrecognized, so we bail if we hit one return tokens } } // <font-size> if pos == len(tokens) { return tokens } result = append(result, tokens[pos]) pos++ // / <line-height> if pos < len(tokens) && tokens[pos].Kind == css_lexer.TDelimSlash { if pos+1 == len(tokens) { return tokens } result = append(result, tokens[pos], tokens[pos+1]) pos += 2 // Remove the whitespace around the "/" character if p.options.MinifyWhitespace { result[len(result)-3].Whitespace &= ^css_ast.WhitespaceAfter result[len(result)-2].Whitespace = 0 result[len(result)-1].Whitespace &= ^css_ast.WhitespaceBefore } } // <font-family> if family, ok := p.mangleFontFamily(tokens[pos:]); ok { return append(result, family...) 
} return tokens } var fontSizeKeywords = map[string]bool{ // <absolute-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-absolute-size "xx-small": true, "x-small": true, "small": true, "medium": true, "large": true, "x-large": true, "xx-large": true, "xxx-large": true, // <relative-size>: https://drafts.csswg.org/css-fonts/#valdef-font-size-relative-size "larger": true, "smaller": true, } // Specification: https://drafts.csswg.org/css-fonts/#font-size-prop func isFontSize(token css_ast.Token) bool { // <length-percentage> if token.Kind == css_lexer.TDimension || token.Kind == css_lexer.TPercentage { return true } // <absolute-size> or <relative-size> if token.Kind == css_lexer.TIdent { _, ok := fontSizeKeywords[strings.ToLower(token.Text)] return ok } return false } ================================================ FILE: lib/esbuild/css_parser/css_decls_font_family.go ================================================ package css_parser import ( "strings" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" ) // Specification: https://drafts.csswg.org/css-values-4/#common-keywords var wideKeywords = map[string]bool{ "initial": true, "inherit": true, "unset": true, } // Specification: https://drafts.csswg.org/css-fonts/#generic-font-families var genericFamilyNames = map[string]bool{ "serif": true, "sans-serif": true, "cursive": true, "fantasy": true, "monospace": true, "system-ui": true, "emoji": true, "math": true, "fangsong": true, "ui-serif": true, "ui-sans-serif": true, "ui-monospace": true, "ui-rounded": true, } // Specification: https://drafts.csswg.org/css-fonts/#font-family-prop func (p *parser) mangleFontFamily(tokens []css_ast.Token) ([]css_ast.Token, bool) { result, rest, ok := p.mangleFamilyNameOrGenericName(nil, tokens) if !ok { return nil, false } for len(rest) > 0 && rest[0].Kind == css_lexer.TComma { result, rest, ok = p.mangleFamilyNameOrGenericName(append(result, rest[0]), rest[1:]) if !ok 
{ return nil, false } } if len(rest) > 0 { return nil, false } return result, true } func (p *parser) mangleFamilyNameOrGenericName(result []css_ast.Token, tokens []css_ast.Token) ([]css_ast.Token, []css_ast.Token, bool) { if len(tokens) > 0 { t := tokens[0] // Handle <generic-family> if t.Kind == css_lexer.TIdent && genericFamilyNames[t.Text] { return append(result, t), tokens[1:], true } // Handle <family-name> if t.Kind == css_lexer.TString { // "If a sequence of identifiers is given as a <family-name>, the computed // value is the name converted to a string by joining all the identifiers // in the sequence by single spaces." // // More information: https://mathiasbynens.be/notes/unquoted-font-family names := strings.Split(t.Text, " ") for _, name := range names { if !isValidCustomIdent(name, genericFamilyNames) { return append(result, t), tokens[1:], true } } for i, name := range names { var whitespace css_ast.WhitespaceFlags if i != 0 || !p.options.MinifyWhitespace { whitespace = css_ast.WhitespaceBefore } result = append(result, css_ast.Token{ Kind: css_lexer.TIdent, Text: name, Whitespace: whitespace, }) } return result, tokens[1:], true } // "Font family names other than generic families must either be given // quoted as <string>s, or unquoted as a sequence of one or more // <custom-ident>." 
if t.Kind == css_lexer.TIdent {
			// Consume a run of consecutive identifiers as one unquoted family
			// name; bail entirely if any word cannot be an unquoted ident
			for {
				if !isValidCustomIdent(t.Text, genericFamilyNames) {
					return nil, nil, false
				}
				result = append(result, t)
				tokens = tokens[1:]
				if len(tokens) == 0 || tokens[0].Kind != css_lexer.TIdent {
					break
				}
				t = tokens[0]
			}
			return result, tokens, true
		}
	}

	// Anything other than the cases listed above causes us to bail
	return nil, nil, false
}

// Specification: https://drafts.csswg.org/css-values-4/#custom-idents
//
// isValidCustomIdent reports whether "text" can be written as an unquoted
// <custom-ident>: it must not collide (case-insensitively) with any of the
// given predefined keywords, any CSS-wide keyword, or "default", must be
// non-empty, and must not contain characters that would require escaping.
func isValidCustomIdent(text string, predefinedKeywords map[string]bool) bool {
	loweredText := strings.ToLower(text)

	if predefinedKeywords[loweredText] {
		return false
	}
	if wideKeywords[loweredText] {
		return false
	}
	if loweredText == "default" {
		return false
	}
	if loweredText == "" {
		return false
	}

	// validate if it contains characters which need to be escaped
	if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
		return false
	}
	for _, c := range text {
		if !css_lexer.IsNameContinue(c) {
			return false
		}
	}

	return true
}

================================================
FILE: lib/esbuild/css_parser/css_decls_font_weight.go
================================================
package css_parser

import (
	"strings"

	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
)

// mangleFontWeight replaces the "normal" and "bold" font-weight keywords
// with their shorter numeric equivalents (400 and 700). Other tokens pass
// through unchanged.
func (p *parser) mangleFontWeight(token css_ast.Token) css_ast.Token {
	if token.Kind != css_lexer.TIdent {
		return token
	}
	switch strings.ToLower(token.Text) {
	case "normal":
		token.Text = "400"
		token.Kind = css_lexer.TNumber
	case "bold":
		token.Text = "700"
		token.Kind = css_lexer.TNumber
	}
	return token
}

================================================
FILE: lib/esbuild/css_parser/css_decls_transform.go
================================================
package css_parser

import (
	"strings"

	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
)

// turnPercentIntoNumberIfShorter rewrites a percentage token as the
// equivalent plain number (shifting the decimal point two places left)
// when the numeric form is strictly shorter than the original text.
func turnPercentIntoNumberIfShorter(t *css_ast.Token) {
	if t.Kind == css_lexer.TPercentage {
		if shifted, ok :=
shiftDot(t.PercentageValue(), -2); ok && len(shifted) < len(t.Text) { t.Kind = css_lexer.TNumber t.Text = shifted } } } // https://www.w3.org/TR/css-transforms-1/#two-d-transform-functions // https://drafts.csswg.org/css-transforms-2/#transform-functions func (p *parser) mangleTransforms(tokens []css_ast.Token) []css_ast.Token { for i := range tokens { if token := &tokens[i]; token.Kind == css_lexer.TFunction { if args := *token.Children; css_ast.TokensAreCommaSeparated(args) { n := len(args) switch strings.ToLower(token.Text) { //////////////////////////////////////////////////////////////////////////////// // 2D transforms case "matrix": // specifies a 2D transformation in the form of a transformation // matrix of the six values a, b, c, d, e, f. if n == 11 { // | a c 0 e | // | b d 0 f | // | 0 0 1 0 | // | 0 0 0 1 | a, b, c, d, e, f := args[0], args[2], args[4], args[6], args[8], args[10] if b.IsZero() && c.IsZero() && e.IsZero() && f.IsZero() { // | a 0 0 0 | // | 0 d 0 0 | // | 0 0 1 0 | // | 0 0 0 1 | if a.EqualIgnoringWhitespace(d) { // "matrix(a, 0, 0, a, 0, 0)" => "scale(a)" token.Text = "scale" *token.Children = args[:1] } else if d.IsOne() { // "matrix(a, 0, 0, 1, 0, 0)" => "scaleX(a)" token.Text = "scaleX" *token.Children = args[:1] } else if a.IsOne() { // "matrix(1, 0, 0, d, 0, 0)" => "scaleY(d)" token.Text = "scaleY" *token.Children = args[6:7] } else { // "matrix(a, 0, 0, d, 0, 0)" => "scale(a, d)" token.Text = "scale" *token.Children = append(args[:2], d) } // Note: A "matrix" cannot be directly converted into a "translate" // because "translate" requires units while "matrix" requires no // units. I'm not sure exactly what the semantics are so I'm not // sure if you can just add "px" or not. Even if that did work, // you still couldn't substitute values containing "var()" since // units would still not be substituted in that case. 
} } case "translate": // specifies a 2D translation by the vector [tx, ty], where tx is the // first translation-value parameter and ty is the optional second // translation-value parameter. If <ty> is not provided, ty has zero // as a value. if n == 1 { args[0].TurnLengthOrPercentageIntoNumberIfZero() } else if n == 3 { tx, ty := &args[0], &args[2] tx.TurnLengthOrPercentageIntoNumberIfZero() ty.TurnLengthOrPercentageIntoNumberIfZero() if ty.IsZero() { // "translate(tx, 0)" => "translate(tx)" *token.Children = args[:1] } else if tx.IsZero() { // "translate(0, ty)" => "translateY(ty)" token.Text = "translateY" *token.Children = args[2:] } } case "translatex": // specifies a translation by the given amount in the X direction. if n == 1 { // "translateX(tx)" => "translate(tx)" token.Text = "translate" args[0].TurnLengthOrPercentageIntoNumberIfZero() } case "translatey": // specifies a translation by the given amount in the Y direction. if n == 1 { args[0].TurnLengthOrPercentageIntoNumberIfZero() } case "scale": // specifies a 2D scale operation by the [sx,sy] scaling vector // described by the 2 parameters. If the second parameter is not // provided, it takes a value equal to the first. For example, // scale(1, 1) would leave an element unchanged, while scale(2, 2) // would cause it to appear twice as long in both the X and Y axes, // or four times its typical geometric size. if n == 1 { turnPercentIntoNumberIfShorter(&args[0]) } else if n == 3 { sx, sy := &args[0], &args[2] turnPercentIntoNumberIfShorter(sx) turnPercentIntoNumberIfShorter(sy) if sx.EqualIgnoringWhitespace(*sy) { // "scale(s, s)" => "scale(s)" *token.Children = args[:1] } else if sy.IsOne() { // "scale(s, 1)" => "scaleX(s)" token.Text = "scaleX" *token.Children = args[:1] } else if sx.IsOne() { // "scale(1, s)" => "scaleY(s)" token.Text = "scaleY" *token.Children = args[2:] } } case "scalex": // specifies a 2D scale operation using the [sx,1] scaling vector, // where sx is given as the parameter. 
if n == 1 { turnPercentIntoNumberIfShorter(&args[0]) } case "scaley": // specifies a 2D scale operation using the [1,sy] scaling vector, // where sy is given as the parameter. if n == 1 { turnPercentIntoNumberIfShorter(&args[0]) } case "rotate": // specifies a 2D rotation by the angle specified in the parameter // about the origin of the element, as defined by the // transform-origin property. For example, rotate(90deg) would // cause elements to appear rotated one-quarter of a turn in the // clockwise direction. if n == 1 { args[0].TurnLengthIntoNumberIfZero() } // Note: This is considered a 2D transform even though it's specified // in terms of a 3D transform because it doesn't trigger Safari's 3D // transform bugs. case "rotatez": // same as rotate3d(0, 0, 1, <angle>), which is a 3d transform // equivalent to the 2d transform rotate(<angle>). if n == 1 { // "rotateZ(angle)" => "rotate(angle)" token.Text = "rotate" args[0].TurnLengthIntoNumberIfZero() } case "skew": // specifies a 2D skew by [ax,ay] for X and Y. If the second // parameter is not provided, it has a zero value. if n == 1 { args[0].TurnLengthIntoNumberIfZero() } else if n == 3 { ax, ay := &args[0], &args[2] ax.TurnLengthIntoNumberIfZero() ay.TurnLengthIntoNumberIfZero() if ay.IsZero() { // "skew(ax, 0)" => "skew(ax)" *token.Children = args[:1] } } case "skewx": // specifies a 2D skew transformation along the X axis by the given // angle. if n == 1 { // "skewX(ax)" => "skew(ax)" token.Text = "skew" args[0].TurnLengthIntoNumberIfZero() } case "skewy": // specifies a 2D skew transformation along the Y axis by the given // angle. if n == 1 { args[0].TurnLengthIntoNumberIfZero() } //////////////////////////////////////////////////////////////////////////////// // 3D transforms // Note: Safari has a bug where 3D transforms render differently than // other transforms. This means we should not minify a 3D transform // into a 2D transform or it will cause a rendering difference in // Safari. 
case "matrix3d": // specifies a 3D transformation as a 4x4 homogeneous matrix of 16 // values in column-major order. if n == 31 { // | m0 m4 m8 m12 | // | m1 m5 m9 m13 | // | m2 m6 m10 m14 | // | m3 m7 m11 m15 | mask := uint32(0) for i := 0; i < 16; i++ { if arg := args[i*2]; arg.IsZero() { mask |= 1 << i } else if arg.IsOne() { mask |= (1 << 16) << i } } const onlyScale = 0b1000_0000_0000_0000_0111_1011_1101_1110 if (mask & onlyScale) == onlyScale { // | m0 0 0 0 | // | 0 m5 0 0 | // | 0 0 m10 0 | // | 0 0 0 1 | sx, sy := args[0], args[10] if sx.IsOne() && sy.IsOne() { token.Text = "scaleZ" *token.Children = args[20:21] } else { token.Text = "scale3d" *token.Children = append(append(args[0:2], args[10:12]...), args[20]) } } // Note: A "matrix3d" cannot be directly converted into a "translate3d" // because "translate3d" requires units while "matrix3d" requires no // units. I'm not sure exactly what the semantics are so I'm not // sure if you can just add "px" or not. Even if that did work, // you still couldn't substitute values containing "var()" since // units would still not be substituted in that case. } case "translate3d": // specifies a 3D translation by the vector [tx,ty,tz], with tx, // ty and tz being the first, second and third translation-value // parameters respectively. if n == 5 { tx, ty, tz := &args[0], &args[2], &args[4] tx.TurnLengthOrPercentageIntoNumberIfZero() ty.TurnLengthOrPercentageIntoNumberIfZero() tz.TurnLengthIntoNumberIfZero() if tx.IsZero() && ty.IsZero() { // "translate3d(0, 0, tz)" => "translateZ(tz)" token.Text = "translateZ" *token.Children = args[4:] } } case "translatez": // specifies a 3D translation by the vector [0,0,tz] with the given // amount in the Z direction. if n == 1 { args[0].TurnLengthIntoNumberIfZero() } case "scale3d": // specifies a 3D scale operation by the [sx,sy,sz] scaling vector // described by the 3 parameters. 
if n == 5 { sx, sy, sz := &args[0], &args[2], &args[4] turnPercentIntoNumberIfShorter(sx) turnPercentIntoNumberIfShorter(sy) turnPercentIntoNumberIfShorter(sz) if sx.IsOne() && sy.IsOne() { // "scale3d(1, 1, sz)" => "scaleZ(sz)" token.Text = "scaleZ" *token.Children = args[4:] } } case "scalez": // specifies a 3D scale operation using the [1,1,sz] scaling vector, // where sz is given as the parameter. if n == 1 { turnPercentIntoNumberIfShorter(&args[0]) } case "rotate3d": // specifies a 3D rotation by the angle specified in last parameter // about the [x,y,z] direction vector described by the first three // parameters. A direction vector that cannot be normalized, such as // [0,0,0], will cause the rotation to not be applied. if n == 7 { x, y, z, angle := &args[0], &args[2], &args[4], &args[6] angle.TurnLengthIntoNumberIfZero() if x.IsOne() && y.IsZero() && z.IsZero() { // "rotate3d(1, 0, 0, angle)" => "rotateX(angle)" token.Text = "rotateX" *token.Children = args[6:] } else if x.IsZero() && y.IsOne() && z.IsZero() { // "rotate3d(0, 1, 0, angle)" => "rotateY(angle)" token.Text = "rotateY" *token.Children = args[6:] } } case "rotatex": // same as rotate3d(1, 0, 0, <angle>). if n == 1 { args[0].TurnLengthIntoNumberIfZero() } case "rotatey": // same as rotate3d(0, 1, 0, <angle>). if n == 1 { args[0].TurnLengthIntoNumberIfZero() } case "perspective": // specifies a perspective projection matrix. This matrix scales // points in X and Y based on their Z value, scaling points with // positive Z values away from the origin, and those with negative Z // values towards the origin. Points on the z=0 plane are unchanged. // The parameter represents the distance of the z=0 plane from the // viewer. 
if n == 1 { args[0].TurnLengthIntoNumberIfZero() } } // Trim whitespace at the ends if args := *token.Children; len(args) > 0 { args[0].Whitespace &= ^css_ast.WhitespaceBefore args[len(args)-1].Whitespace &= ^css_ast.WhitespaceAfter } } } } return tokens } ================================================ FILE: lib/esbuild/css_parser/css_parser.go ================================================ package css_parser import ( "fmt" "strings" "github.com/withastro/compiler/lib/esbuild/ast" "github.com/withastro/compiler/lib/esbuild/compat" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" "github.com/withastro/compiler/lib/esbuild/logger" ) // This is mostly a normal CSS parser with one exception: the addition of // support for parsing https://drafts.csswg.org/css-nesting-1/. type parser struct { log logger.Log source logger.Source tokens []css_lexer.Token legalComments []css_lexer.Comment stack []css_lexer.T importRecords []ast.ImportRecord tracker logger.LineColumnTracker index int end int legalCommentIndex int prevError logger.Loc options Options } type Options struct { OriginalTargetEnv string UnsupportedCSSFeatures compat.CSSFeature MinifySyntax bool MinifyWhitespace bool } func Parse(log logger.Log, source logger.Source, options Options) css_ast.AST { result := css_lexer.Tokenize(log, source) p := parser{ log: log, source: source, tracker: logger.MakeLineColumnTracker(&source), options: options, tokens: result.Tokens, legalComments: result.LegalComments, prevError: logger.Loc{Start: -1}, } p.end = len(p.tokens) rules := p.parseListOfRules(ruleContext{ isTopLevel: true, parseSelectors: true, }) p.expect(css_lexer.TEndOfFile) return css_ast.AST{ Rules: rules, ImportRecords: p.importRecords, ApproximateLineCount: result.ApproximateLineCount, SourceMapComment: result.SourceMapComment, } } func (p *parser) advance() { if p.index < p.end { p.index++ } } func (p *parser) at(index int) css_lexer.Token { if index < 
p.end {
		return p.tokens[index]
	}

	// When "p.end" has been moved before the real end of the token stream,
	// synthesize an end-of-file token located at that boundary
	if p.end < len(p.tokens) {
		return css_lexer.Token{
			Kind:  css_lexer.TEndOfFile,
			Range: logger.Range{Loc: p.tokens[p.end].Range.Loc},
		}
	}

	// Otherwise synthesize an end-of-file token at the end of the source
	return css_lexer.Token{
		Kind:  css_lexer.TEndOfFile,
		Range: logger.Range{Loc: logger.Loc{Start: int32(len(p.source.Contents))}},
	}
}

// current returns the token at the parser's current position.
func (p *parser) current() css_lexer.Token {
	return p.at(p.index)
}

// next returns the token just after the current one.
func (p *parser) next() css_lexer.Token {
	return p.at(p.index + 1)
}

// raw returns the current token's raw source text.
func (p *parser) raw() string {
	t := p.current()
	return p.source.Contents[t.Range.Loc.Start:t.Range.End()]
}

// decoded returns the current token's text with escape sequences decoded.
func (p *parser) decoded() string {
	return p.current().DecodedText(p.source.Contents)
}

// peek reports whether the current token has the given kind, without
// consuming it.
func (p *parser) peek(kind css_lexer.T) bool {
	return kind == p.current().Kind
}

// eat advances past the current token if it has the given kind, and
// reports whether it did.
func (p *parser) eat(kind css_lexer.T) bool {
	if p.peek(kind) {
		p.advance()
		return true
	}
	return false
}

// expect is like eat but logs a warning (at most one per source location)
// when the expected token kind is not present.
func (p *parser) expect(kind css_lexer.T) bool {
	if p.eat(kind) {
		return true
	}
	t := p.current()
	if (t.Flags & css_lexer.DidWarnAboutSingleLineComment) != 0 {
		return false
	}

	var text string
	var suggestion string
	expected := kind.String()

	// If the expected token is a literal like ";", offer it as a suggestion
	if strings.HasPrefix(expected, "\"") && strings.HasSuffix(expected, "\"") {
		suggestion = expected[1 : len(expected)-1]
	}

	if (kind == css_lexer.TSemicolon || kind == css_lexer.TColon) && p.index > 0 && p.at(p.index-1).Kind == css_lexer.TWhitespace {
		// Have a nice error message for forgetting a trailing semicolon or colon
		text = fmt.Sprintf("Expected %s", expected)
		t = p.at(p.index - 1)
	} else {
		switch t.Kind {
		case css_lexer.TEndOfFile, css_lexer.TWhitespace:
			text = fmt.Sprintf("Expected %s but found %s", expected, t.Kind.String())
			t.Range.Len = 0
		case css_lexer.TBadURL, css_lexer.TBadString:
			text = fmt.Sprintf("Expected %s but found %s", expected, t.Kind.String())
		default:
			text = fmt.Sprintf("Expected %s but found %q", expected, p.raw())
		}
	}

	// Only log one warning per distinct source location to avoid spam
	if t.Range.Loc.Start > p.prevError.Start {
		data := p.tracker.MsgData(t.Range, text)
		data.Location.Suggestion = suggestion
		p.log.AddMsg(logger.Msg{Kind: logger.Warning, Data: data})
p.prevError = t.Range.Loc } return false } func (p *parser) unexpected() { if t := p.current(); t.Range.Loc.Start > p.prevError.Start && (t.Flags&css_lexer.DidWarnAboutSingleLineComment) == 0 { var text string switch t.Kind { case css_lexer.TEndOfFile, css_lexer.TWhitespace: text = fmt.Sprintf("Unexpected %s", t.Kind.String()) t.Range.Len = 0 case css_lexer.TBadURL, css_lexer.TBadString: text = fmt.Sprintf("Unexpected %s", t.Kind.String()) default: text = fmt.Sprintf("Unexpected %q", p.raw()) } p.log.Add(logger.Warning, &p.tracker, t.Range, text) p.prevError = t.Range.Loc } } type ruleContext struct { isTopLevel bool parseSelectors bool } func (p *parser) parseListOfRules(context ruleContext) []css_ast.Rule { atRuleContext := atRuleContext{} if context.isTopLevel { atRuleContext.charsetValidity = atRuleValid atRuleContext.importValidity = atRuleValid } rules := []css_ast.Rule{} didFindAtImport := false loop: for { // If there are any legal comments immediately before the current token, // turn them all into comment rules and append them to the current rule list for p.legalCommentIndex < len(p.legalComments) { comment := p.legalComments[p.legalCommentIndex] if comment.TokenIndexAfter > uint32(p.index) { break } if comment.TokenIndexAfter == uint32(p.index) { rules = append(rules, css_ast.Rule{Loc: comment.Loc, Data: &css_ast.RComment{Text: comment.Text}}) } p.legalCommentIndex++ } switch p.current().Kind { case css_lexer.TEndOfFile: break loop case css_lexer.TCloseBrace: if !context.isTopLevel { break loop } case css_lexer.TWhitespace: p.advance() continue case css_lexer.TAtKeyword: rule := p.parseAtRule(atRuleContext) // Disallow "@charset" and "@import" after other rules if context.isTopLevel { switch r := rule.Data.(type) { case *css_ast.RAtCharset: // This doesn't invalidate anything because it always comes first case *css_ast.RAtImport: didFindAtImport = true if atRuleContext.charsetValidity == atRuleValid { atRuleContext.afterLoc = rule.Loc 
atRuleContext.charsetValidity = atRuleInvalidAfter } case *css_ast.RAtLayer: if atRuleContext.charsetValidity == atRuleValid { atRuleContext.afterLoc = rule.Loc atRuleContext.charsetValidity = atRuleInvalidAfter } // From the specification: "Note: No @layer rules are allowed between // @import and @namespace rules. Any @layer rule that comes after an // @import or @namespace rule will cause any subsequent @import or // @namespace rules to be ignored." if atRuleContext.importValidity == atRuleValid && (r.Rules != nil || didFindAtImport) { atRuleContext.afterLoc = rule.Loc atRuleContext.charsetValidity = atRuleInvalidAfter atRuleContext.importValidity = atRuleInvalidAfter } default: if atRuleContext.importValidity == atRuleValid { atRuleContext.afterLoc = rule.Loc atRuleContext.charsetValidity = atRuleInvalidAfter atRuleContext.importValidity = atRuleInvalidAfter } } } rules = append(rules, rule) continue case css_lexer.TCDO, css_lexer.TCDC: if context.isTopLevel { p.advance() continue } } if atRuleContext.importValidity == atRuleValid { atRuleContext.afterLoc = p.current().Range.Loc atRuleContext.charsetValidity = atRuleInvalidAfter atRuleContext.importValidity = atRuleInvalidAfter } if context.parseSelectors { rules = append(rules, p.parseSelectorRuleFrom(p.index, parseSelectorOpts{})) } else { rules = append(rules, p.parseQualifiedRuleFrom(p.index, false /* isAlreadyInvalid */)) } } if p.options.MinifySyntax { rules = mangleRules(rules) } return rules } func (p *parser) parseListOfDeclarations() (list []css_ast.Rule) { list = []css_ast.Rule{} for { switch p.current().Kind { case css_lexer.TWhitespace, css_lexer.TSemicolon: p.advance() case css_lexer.TEndOfFile, css_lexer.TCloseBrace: list = p.processDeclarations(list) if p.options.MinifySyntax { list = mangleRules(list) } return case css_lexer.TAtKeyword: list = append(list, p.parseAtRule(atRuleContext{ isDeclarationList: true, allowNesting: true, })) case css_lexer.TDelimAmpersand: // Reference: 
// https://drafts.csswg.org/css-nesting-1/
			list = append(list, p.parseSelectorRuleFrom(p.index, parseSelectorOpts{allowNesting: true}))

		default:
			// Ambiguous: this could be either a declaration or a nested
			// selector rule, so look ahead before committing
			if p.shouldParseNestedSelector() {
				list = append(list, p.parseSelectorRuleFrom(p.index, parseSelectorOpts{allowNesting: true}))
			} else {
				list = append(list, p.parseDeclaration())
			}
		}
	}
}

// shouldParseNestedSelector speculatively parses ahead on a copy of the
// parser (with logging suppressed so the trial run emits no warnings) to
// decide whether the upcoming tokens form a nested selector rule followed
// by "{" rather than a declaration.
func (p *parser) shouldParseNestedSelector() bool {
	clone := *p
	clone.log = logger.Log{AddMsg: func(msg logger.Msg) {}}

	// Peek ahead to treat declarations that actually start a nested rule as selector rules.
	if _, ok := clone.parseSelectorList(parseSelectorOpts{allowNesting: true}); !ok {
		return false
	}
	return clone.peek(css_lexer.TOpenBrace)
}

// mangleRules compacts a rule list for minified output: it drops empty
// rules (where safe), merges adjacent selectors with identical bodies,
// collapses nested "@layer" rules, and removes duplicate rules keeping
// only the last copy.
func mangleRules(rules []css_ast.Rule) []css_ast.Rule {
	type hashEntry struct {
		indices []uint32
	}

	// Remove empty rules
	n := 0
	for _, rule := range rules {
		switch r := rule.Data.(type) {
		case *css_ast.RAtKeyframes:
			// Do not remove empty "@keyframe foo {}" rules. Even empty rules still
			// dispatch JavaScript animation events, so removing them changes
			// behavior: https://bugzilla.mozilla.org/show_bug.cgi?id=1004377.

		case *css_ast.RAtLayer:
			if len(r.Rules) == 0 && len(r.Names) > 0 {
				// Do not remove empty "@layer foo {}" rules. The specification says:
				// "Cascade layers are sorted by the order in which they first are
				// declared, with nested layers grouped within their parent layers
				// before any unlayered rules." So removing empty rules could change
				// the order in which they are first declared, and is therefore invalid.
				//
				// We can turn "@layer foo {}" into "@layer foo;" to be shorter. But
				// don't collapse anonymous "@layer {}" into "@layer;" because that is
				// a syntax error.
r.Rules = nil } else if len(r.Rules) == 1 && len(r.Names) == 1 { // Only collapse layers if each layer has exactly one name if r2, ok := r.Rules[0].Data.(*css_ast.RAtLayer); ok && len(r2.Names) == 1 { // "@layer a { @layer b {} }" => "@layer a.b;" // "@layer a { @layer b { c {} } }" => "@layer a.b { c {} }" r.Names[0] = append(r.Names[0], r2.Names[0]...) r.Rules = r2.Rules } } case *css_ast.RKnownAt: if len(r.Rules) == 0 { continue } case *css_ast.RSelector: if len(r.Rules) == 0 { continue } } rules[n] = rule n++ } rules = rules[:n] // Remove duplicate rules, scanning from the back so we keep the last duplicate start := n entries := make(map[uint32]hashEntry) skipRule: for i := n - 1; i >= 0; i-- { rule := rules[i] // Skip over preserved comments next := i - 1 for next >= 0 { if _, ok := rules[next].Data.(*css_ast.RComment); !ok { break } next-- } // Merge adjacent selectors with the same content // "a { color: red; } b { color: red; }" => "a, b { color: red; }" if next >= 0 { if r, ok := rule.Data.(*css_ast.RSelector); ok { if prev, ok := rules[next].Data.(*css_ast.RSelector); ok && css_ast.RulesEqual(r.Rules, prev.Rules) && isSafeSelectors(r.Selectors) && isSafeSelectors(prev.Selectors) { nextSelector: for _, sel := range r.Selectors { for _, prevSel := range prev.Selectors { if sel.Equal(prevSel) { // Don't add duplicate selectors more than once continue nextSelector } } prev.Selectors = append(prev.Selectors, sel) } continue skipRule } } } // For duplicate rules, omit all but the last copy if hash, ok := rule.Data.Hash(); ok { entry := entries[hash] for _, index := range entry.indices { if rule.Data.Equal(rules[index].Data) { continue skipRule } } entry.indices = append(entry.indices, uint32(i)) entries[hash] = entry } start-- rules[start] = rule } return rules[start:] } // Reference: https://developer.mozilla.org/en-US/docs/Web/HTML/Element var nonDeprecatedElementsSupportedByIE7 = map[string]bool{ "a": true, "abbr": true, "address": true, "area": true, "b": 
true, "base": true, "blockquote": true, "body": true, "br": true, "button": true, "caption": true, "cite": true, "code": true, "col": true, "colgroup": true, "dd": true, "del": true, "dfn": true, "div": true, "dl": true, "dt": true, "em": true, "embed": true, "fieldset": true, "form": true, "h1": true, "h2": true, "h3": true, "h4": true, "h5": true, "h6": true, "head": true, "hr": true, "html": true, "i": true, "iframe": true, "img": true, "input": true, "ins": true, "kbd": true, "label": true, "legend": true, "li": true, "link": true, "map": true, "menu": true, "meta": true, "noscript": true, "object": true, "ol": true, "optgroup": true, "option": true, "p": true, "param": true, "pre": true, "q": true, "ruby": true, "s": true, "samp": true, "script": true, "select": true, "small": true, "span": true, "strong": true, "style": true, "sub": true, "sup": true, "table": true, "tbody": true, "td": true, "textarea": true, "tfoot": true, "th": true, "thead": true, "title": true, "tr": true, "u": true, "ul": true, "var": true, } // This only returns true if all of these selectors are considered "safe" which // means that they are very likely to work in any browser a user might reasonably // be using. We do NOT want to merge adjacent qualified rules with the same body // if any of the selectors are unsafe, since then browsers which don't support // that particular feature would ignore the entire merged qualified rule: // // Input: // a { color: red } // b { color: red } // input::-moz-placeholder { color: red } // // Valid output: // a, b { color: red } // input::-moz-placeholder { color: red } // // Invalid output: // a, b, input::-moz-placeholder { color: red } // // This considers IE 7 and above to be a browser that a user could possibly use. // Versions of IE less than 6 are not considered. 
func isSafeSelectors(complexSelectors []css_ast.ComplexSelector) bool {
	for _, complex := range complexSelectors {
		for _, compound := range complex.Selectors {
			if compound.NestingSelector != css_ast.NestingSelectorNone {
				// Bail because this is an extension: https://drafts.csswg.org/css-nesting-1/
				return false
			}

			if compound.Combinator != "" {
				// "Before Internet Explorer 10, the combinator only works in standards mode"
				// Reference: https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Selectors
				return false
			}

			if compound.TypeSelector != nil {
				if compound.TypeSelector.NamespacePrefix != nil {
					// Bail if we hit a namespace, which doesn't work in IE before version 9
					// Reference: https://developer.mozilla.org/en-US/docs/Web/CSS/Type_selectors
					return false
				}

				if compound.TypeSelector.Name.Kind == css_lexer.TIdent && !nonDeprecatedElementsSupportedByIE7[compound.TypeSelector.Name.Text] {
					// Bail if this element is either deprecated or not supported in IE 7
					return false
				}
			}

			for _, ss := range compound.SubclassSelectors {
				switch s := ss.(type) {
				case *css_ast.SSAttribute:
					if s.MatcherModifier != 0 {
						// Bail if we hit a case modifier, which doesn't work in IE at all
						// Reference: https://developer.mozilla.org/en-US/docs/Web/CSS/Attribute_selectors
						return false
					}

				case *css_ast.SSPseudoClass:
					// Bail if this pseudo class doesn't match a hard-coded list that's
					// known to work everywhere. For example, ":focus" doesn't work in IE 7.
					// Reference: https://developer.mozilla.org/en-US/docs/Web/CSS/Pseudo-classes
					if s.Args == nil && !s.IsElement {
						switch s.Name {
						case "active", "first-child", "hover", "link", "visited":
							// This pseudo-class is on the known-safe allowlist; keep checking
							// the remaining subclass selectors.
							continue
						}
					}
					return false
				}
			}
		}
	}
	// Every selector passed every check, so merging is considered safe
	return true
}

// parseURLOrString consumes a <string> token, a <url> token, or a "url(...)"
// function call wrapping a string, returning the decoded text and its range.
// Returns ok == false (without consuming a token) when none of these match.
func (p *parser) parseURLOrString() (string, logger.Range, bool) {
	t := p.current()
	switch t.Kind {
	case css_lexer.TString:
		text := p.decoded()
		p.advance()
		return text, t.Range, true

	case css_lexer.TURL:
		text := p.decoded()
		p.advance()
		return text, t.Range, true

	case css_lexer.TFunction:
		if p.decoded() == "url" {
			p.advance()
			t = p.current()
			text := p.decoded()
			if p.expect(css_lexer.TString) && p.expect(css_lexer.TCloseParen) {
				return text, t.Range, true
			}
		}
	}

	return "", logger.Range{}, false
}

// expectURLOrString is like parseURLOrString but reports a syntax error
// (expected URL token) when neither form is present.
func (p *parser) expectURLOrString() (url string, r logger.Range, ok bool) {
	url, r, ok = p.parseURLOrString()
	if !ok {
		p.expect(css_lexer.TURL)
	}
	return
}

// atRuleKind classifies how the body of a known at-rule should be parsed.
type atRuleKind uint8

const (
	atRuleUnknown          atRuleKind = iota // Not a recognized at-rule
	atRuleDeclarations                       // Block always contains declarations (e.g. "@font-face")
	atRuleInheritContext                     // Block contains whatever the current context contains (e.g. "@media")
	atRuleQualifiedOrEmpty                   // Either a "{}" block or a bare ";" (e.g. "@layer")
	atRuleEmpty                              // No block at all (e.g. "@charset")
)

// specialAtRules maps known at-rule names (without the leading "@") to the
// strategy used to parse their bodies.
var specialAtRules = map[string]atRuleKind{
	"font-face": atRuleDeclarations,
	"page":      atRuleDeclarations,

	// These go inside "@page": https://www.w3.org/TR/css-page-3/#syntax-page-selector
	"bottom-center":       atRuleDeclarations,
	"bottom-left-corner":  atRuleDeclarations,
	"bottom-left":         atRuleDeclarations,
	"bottom-right-corner": atRuleDeclarations,
	"bottom-right":        atRuleDeclarations,
	"left-bottom":         atRuleDeclarations,
	"left-middle":         atRuleDeclarations,
	"left-top":            atRuleDeclarations,
	"right-bottom":        atRuleDeclarations,
	"right-middle":        atRuleDeclarations,
	"right-top":           atRuleDeclarations,
	"top-center":          atRuleDeclarations,
	"top-left-corner":     atRuleDeclarations,
	"top-left":            atRuleDeclarations,
	"top-right-corner":    atRuleDeclarations,
	"top-right":           atRuleDeclarations,

	// These properties are very deprecated and appear to only be useful for
	// mobile versions of internet explorer (which may no longer exist?), but
	// they are used by the https://ant.design/ design system
	// so we recognize them to avoid the warning.
	//
	// Documentation: https://developer.mozilla.org/en-US/docs/Web/CSS/@viewport
	// Discussion: https://github.com/w3c/csswg-drafts/issues/4766
	//
	"viewport":     atRuleDeclarations,
	"-ms-viewport": atRuleDeclarations,

	// This feature has been removed from the web because it's actively harmful.
	// However, there is one exception where "@-moz-document url-prefix() {" is
	// accepted by Firefox to basically be an "if Firefox" conditional rule.
	//
	// Documentation: https://developer.mozilla.org/en-US/docs/Web/CSS/@document
	// Discussion: https://bugzilla.mozilla.org/show_bug.cgi?id=1035091
	//
	"document":      atRuleInheritContext,
	"-moz-document": atRuleInheritContext,

	// This is a new feature that changes how the CSS rule cascade works. It can
	// end in either a "{}" block or a ";" rule terminator so we need this special
	// case to support both.
	//
	// Documentation: https://developer.mozilla.org/en-US/docs/Web/CSS/@layer
	// Motivation: https://developer.chrome.com/blog/cascade-layers/
	//
	"layer": atRuleQualifiedOrEmpty,

	"media":    atRuleInheritContext,
	"scope":    atRuleInheritContext,
	"supports": atRuleInheritContext,

	// Reference: https://github.com/w3c/csswg-drafts/issues?q=is%3Aissue+label%3Acss-contain-3+
	"container": atRuleInheritContext,

	// Reference: https://drafts.csswg.org/css-transitions-2/#defining-before-change-style-the-starting-style-rule
	"starting-style": atRuleInheritContext,

	// Reference: https://drafts.csswg.org/css-nesting-1/
	"nest": atRuleDeclarations,
}

// atRuleValidity describes whether "@charset"/"@import" are still allowed at
// the current position in the file.
type atRuleValidity uint8

const (
	atRuleInvalid      atRuleValidity = iota // Never valid here (e.g. not at the top level)
	atRuleValid                              // Still valid at this position
	atRuleInvalidAfter                       // Was valid, but an earlier rule now forbids it
)

// atRuleContext carries positional information into parseAtRule.
type atRuleContext struct {
	afterLoc          logger.Loc     // Location of the rule that invalidated "@charset"/"@import" (used in notes)
	charsetValidity   atRuleValidity // Whether "@charset" may appear here
	importValidity    atRuleValidity // Whether "@import" may appear here
	isDeclarationList bool           // True when the surrounding block holds declarations, not rules
	allowNesting      bool           // True when CSS nesting syntax is allowed in this context
}

// parseAtRule parses a single at-rule starting at the current "@name" token.
// Known at-rules ("@charset", "@import", "@keyframes", "@nest", "@layer", and
// everything in specialAtRules) get dedicated handling; anything else is
// captured as an RUnknownAt with its raw prelude and block.
func (p *parser) parseAtRule(context atRuleContext) css_ast.Rule {
	// Parse the name
	atToken := p.decoded()
	atRange := p.current().Range
	kind := specialAtRules[atToken]
	p.advance()

	// Parse the prelude
	preludeStart
:= p.index

	// Breaking to this label abandons the specialized handling below and falls
	// through to the generic "unknown prelude" parsing at the end.
abortRuleParser:
	switch atToken {
	case "charset":
		switch context.charsetValidity {
		case atRuleInvalid:
			p.log.Add(logger.Warning, &p.tracker, atRange, "\"@charset\" must be the first rule in the file")

		case atRuleInvalidAfter:
			p.log.AddWithNotes(logger.Warning, &p.tracker, atRange, "\"@charset\" must be the first rule in the file",
				[]logger.MsgData{p.tracker.MsgData(logger.Range{Loc: context.afterLoc},
					"This rule cannot come before a \"@charset\" rule")})

		case atRuleValid:
			kind = atRuleEmpty
			p.expect(css_lexer.TWhitespace)
			if p.peek(css_lexer.TString) {
				encoding := p.decoded()
				// Only UTF-8 is supported; warn for anything else but keep the rule
				if !strings.EqualFold(encoding, "UTF-8") {
					p.log.Add(logger.Warning, &p.tracker, p.current().Range,
						fmt.Sprintf("\"UTF-8\" will be used instead of unsupported charset %q", encoding))
				}
				p.advance()
				p.expect(css_lexer.TSemicolon)
				return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RAtCharset{Encoding: encoding}}
			}
			p.expect(css_lexer.TString)
		}

	case "import":
		switch context.importValidity {
		case atRuleInvalid:
			p.log.Add(logger.Warning, &p.tracker, atRange, "\"@import\" is only valid at the top level")

		case atRuleInvalidAfter:
			p.log.AddWithNotes(logger.Warning, &p.tracker, atRange, "All \"@import\" rules must come first",
				[]logger.MsgData{p.tracker.MsgData(logger.Range{Loc: context.afterLoc},
					"This rule cannot come before an \"@import\" rule")})

		case atRuleValid:
			kind = atRuleEmpty
			p.eat(css_lexer.TWhitespace)
			if path, r, ok := p.expectURLOrString(); ok {
				// Everything up to the terminator is the import condition list
				// (media queries, "supports(...)", "layer(...)", etc.)
				importConditionsStart := p.index
				for {
					if kind := p.current().Kind; kind == css_lexer.TSemicolon || kind == css_lexer.TOpenBrace ||
						kind == css_lexer.TCloseBrace || kind == css_lexer.TEndOfFile {
						break
					}
					p.parseComponentValue()
				}
				if p.current().Kind == css_lexer.TOpenBrace {
					break // Avoid parsing an invalid "@import" rule
				}
				importConditions := p.convertTokens(p.tokens[importConditionsStart:p.index])
				kind := ast.ImportAt

				// Insert or remove whitespace before the first token
				if len(importConditions) > 0 {
					kind = ast.ImportAtConditional
					if p.options.MinifyWhitespace {
						importConditions[0].Whitespace &= ^css_ast.WhitespaceBefore
					} else {
						importConditions[0].Whitespace |= css_ast.WhitespaceBefore
					}
				}

				p.expect(css_lexer.TSemicolon)
				importRecordIndex := uint32(len(p.importRecords))
				p.importRecords = append(p.importRecords, ast.ImportRecord{
					Kind:  kind,
					Path:  logger.Path{Text: path},
					Range: r,
				})
				return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RAtImport{
					ImportRecordIndex: importRecordIndex,
					ImportConditions:  importConditions,
				}}
			}
		}

	case "keyframes", "-webkit-keyframes", "-moz-keyframes", "-ms-keyframes", "-o-keyframes":
		p.eat(css_lexer.TWhitespace)
		var name string

		if p.peek(css_lexer.TIdent) {
			name = p.decoded()
			p.advance()
		} else if !p.expect(css_lexer.TIdent) && !p.eat(css_lexer.TString) && !p.peek(css_lexer.TOpenBrace) {
			// Consider string names a syntax error even though they are allowed by
			// the specification and they work in Firefox because they do not work in
			// Chrome or Safari.
			break
		}

		p.eat(css_lexer.TWhitespace)
		blockStart := p.index

		if p.expect(css_lexer.TOpenBrace) {
			var blocks []css_ast.KeyframeBlock

		badSyntax:
			for {
				switch p.current().Kind {
				case css_lexer.TWhitespace:
					p.advance()
					continue

				case css_lexer.TCloseBrace:
					p.advance()
					return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RAtKeyframes{
						AtToken: atToken,
						Name:    name,
						Blocks:  blocks,
					}}

				case css_lexer.TEndOfFile:
					break badSyntax

				case css_lexer.TOpenBrace:
					p.expect(css_lexer.TPercentage)
					break badSyntax

				default:
					var selectors []string

				selectors:
					for {
						t := p.current()
						switch t.Kind {
						case css_lexer.TWhitespace:
							p.advance()
							continue

						case css_lexer.TOpenBrace:
							p.advance()
							break selectors

						case css_lexer.TCloseBrace, css_lexer.TEndOfFile:
							p.expect(css_lexer.TOpenBrace)
							break badSyntax

						case css_lexer.TIdent, css_lexer.TPercentage:
							text := p.decoded()
							if t.Kind == css_lexer.TIdent {
								if text == "from" {
									if p.options.MinifySyntax {
										text = "0%" // "0%" is equivalent to but shorter than "from"
									}
								} else if text != "to" {
									// Only "from", "to", and percentages are valid keyframe selectors
									p.expect(css_lexer.TPercentage)
								}
							} else if p.options.MinifySyntax && text == "100%" {
								text = "to" // "to" is equivalent to but shorter than "100%"
							}
							selectors = append(selectors, text)
							p.advance()

							// Keyframe selectors are comma-separated
							p.eat(css_lexer.TWhitespace)
							if p.eat(css_lexer.TComma) {
								p.eat(css_lexer.TWhitespace)
								if k := p.current().Kind; k != css_lexer.TIdent && k != css_lexer.TPercentage {
									p.expect(css_lexer.TPercentage)
									break badSyntax
								}
							} else if k := p.current().Kind; k != css_lexer.TOpenBrace && k != css_lexer.TCloseBrace && k != css_lexer.TEndOfFile {
								p.expect(css_lexer.TComma)
								break badSyntax
							}

						default:
							p.expect(css_lexer.TPercentage)
							break badSyntax
						}
					}

					rules := p.parseListOfDeclarations()
					p.expect(css_lexer.TCloseBrace)

					// "@keyframes { from {} to { color: red } }" => "@keyframes { to { color: red } }"
					if !p.options.MinifySyntax || len(rules) > 0 {
						blocks = append(blocks, css_ast.KeyframeBlock{
							Selectors: selectors,
							Rules:     rules,
						})
					}
				}
			}

			// Otherwise, finish parsing the body and return an unknown rule
			for !p.peek(css_lexer.TCloseBrace) && !p.peek(css_lexer.TEndOfFile) {
				p.parseComponentValue()
			}
			p.expect(css_lexer.TCloseBrace)
			prelude := p.convertTokens(p.tokens[preludeStart:blockStart])
			block, _ := p.convertTokensHelper(p.tokens[blockStart:p.index], css_lexer.TEndOfFile, convertTokensOpts{allowImports: true})
			return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RUnknownAt{AtToken: atToken, Prelude: prelude, Block: block}}
		}

	case "nest":
		// Reference: https://drafts.csswg.org/css-nesting-1/
		p.eat(css_lexer.TWhitespace)
		if kind := p.current().Kind; kind != css_lexer.TSemicolon && kind != css_lexer.TOpenBrace && kind != css_lexer.TCloseBrace && kind != css_lexer.TEndOfFile {
			// "preludeStart-1" includes the "@nest" token itself in the prelude
			return p.parseSelectorRuleFrom(preludeStart-1, parseSelectorOpts{atNestRange: atRange, allowNesting: context.allowNesting})
		}

	case "layer":
		// Reference: https://developer.mozilla.org/en-US/docs/Web/CSS/@layer

		// Read the layer name list. Each name is a dot-separated chain of
		// identifiers ("a.b.c"), and names are separated by commas.
		var names [][]string
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TIdent) {
			for {
				ident, ok := p.expectValidLayerNameIdent()
				if !ok {
					break abortRuleParser
				}
				name := []string{ident}
				for {
					p.eat(css_lexer.TWhitespace)
					if !p.eat(css_lexer.TDelimDot) {
						break
					}
					p.eat(css_lexer.TWhitespace)
					ident, ok := p.expectValidLayerNameIdent()
					if !ok {
						break abortRuleParser
					}
					name = append(name, ident)
				}
				names = append(names, name)
				p.eat(css_lexer.TWhitespace)
				if !p.eat(css_lexer.TComma) {
					break
				}
				p.eat(css_lexer.TWhitespace)
			}
		}

		// Read the optional block (only allowed with at most one name)
		if len(names) <= 1 && p.eat(css_lexer.TOpenBrace) {
			rules := p.parseListOfRules(ruleContext{
				parseSelectors: true,
			})
			p.expect(css_lexer.TCloseBrace)
			return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RAtLayer{Names: names, Rules: rules}}
		}

		// Handle lack of a block
		if len(names) >= 1 && p.eat(css_lexer.TSemicolon) {
			return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RAtLayer{Names: names}}
		}

		// Otherwise there's some kind of syntax error
		if kind := p.current().Kind; kind == css_lexer.TOpenBrace || kind == css_lexer.TCloseBrace || kind == css_lexer.TEndOfFile {
			p.expect(css_lexer.TSemicolon)
		} else {
			p.unexpected()
		}

	default:
		if kind == atRuleUnknown && atToken == "namespace" {
			// CSS namespaces are a weird feature that appears to only really be
			// useful for styling XML. And the world has moved on from XHTML to
			// HTML5 so pretty much no one uses CSS namespaces anymore. They are
			// also complicated to support in a bundler because CSS namespaces are
			// file-scoped, which means:
			//
			// * Default namespaces can be different in different files, in which
			//   case some default namespaces would have to be converted to prefixed
			//   namespaces to avoid collisions.
			//
			// * Prefixed namespaces from different files can use the same name, in
			//   which case some prefixed namespaces would need to be renamed to
			//   avoid collisions.
			//
			// Instead of implementing all of that for an extremely obscure feature,
			// CSS namespaces are just explicitly not supported.
			p.log.Add(logger.Warning, &p.tracker, atRange, "\"@namespace\" rules are not supported")
		}
	}

	// Parse an unknown prelude
prelude:
	for {
		switch p.current().Kind {
		case css_lexer.TOpenBrace, css_lexer.TEndOfFile:
			break prelude

		case css_lexer.TSemicolon, css_lexer.TCloseBrace:
			prelude := p.convertTokens(p.tokens[preludeStart:p.index])

			switch kind {
			case atRuleQualifiedOrEmpty:
				// Parse a known at rule below
				break prelude

			case atRuleEmpty, atRuleUnknown:
				// Parse an unknown at rule
				p.expect(css_lexer.TSemicolon)
				return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RUnknownAt{AtToken: atToken, Prelude: prelude}}

			default:
				// Report an error for rules that should have blocks
				p.expect(css_lexer.TOpenBrace)
				p.eat(css_lexer.TSemicolon)
				return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RUnknownAt{AtToken: atToken, Prelude: prelude}}
			}

		default:
			p.parseComponentValue()
		}
	}
	prelude := p.convertTokens(p.tokens[preludeStart:p.index])
	blockStart := p.index

	switch kind {
	case atRuleEmpty:
		// Report an error for rules that shouldn't have blocks
		p.expect(css_lexer.TSemicolon)
		p.parseBlock(css_lexer.TOpenBrace, css_lexer.TCloseBrace)
		block := p.convertTokens(p.tokens[blockStart:p.index])
		return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RUnknownAt{AtToken: atToken, Prelude: prelude, Block: block}}

	case atRuleDeclarations:
		// Parse known rules whose blocks always consist of declarations
		p.expect(css_lexer.TOpenBrace)
		rules := p.parseListOfDeclarations()
		p.expect(css_lexer.TCloseBrace)
		return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RKnownAt{AtToken: atToken, Prelude: prelude, Rules: rules}}

	case atRuleInheritContext:
		// Parse known rules whose blocks consist of whatever the current context is
		p.expect(css_lexer.TOpenBrace)
		var rules []css_ast.Rule
		if context.isDeclarationList {
			rules = p.parseListOfDeclarations()
		} else {
			rules = p.parseListOfRules(ruleContext{
				parseSelectors: true,
			})
		}
		p.expect(css_lexer.TCloseBrace)
		return css_ast.Rule{Loc: atRange.Loc, Data:
&css_ast.RKnownAt{AtToken: atToken, Prelude: prelude, Rules: rules}}

	case atRuleQualifiedOrEmpty:
		// Either a "{}" block of rules or a bare ";" terminator (e.g. "@layer a;")
		if p.eat(css_lexer.TOpenBrace) {
			rules := p.parseListOfRules(ruleContext{
				parseSelectors: true,
			})
			p.expect(css_lexer.TCloseBrace)
			return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RKnownAt{AtToken: atToken, Prelude: prelude, Rules: rules}}
		}
		p.expect(css_lexer.TSemicolon)
		return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RKnownAt{AtToken: atToken, Prelude: prelude}}

	default:
		// Otherwise, parse an unknown rule
		p.parseBlock(css_lexer.TOpenBrace, css_lexer.TCloseBrace)
		block, _ := p.convertTokensHelper(p.tokens[blockStart:p.index], css_lexer.TEndOfFile, convertTokensOpts{allowImports: true})
		return css_ast.Rule{Loc: atRange.Loc, Data: &css_ast.RUnknownAt{AtToken: atToken, Prelude: prelude, Block: block}}
	}
}

// expectValidLayerNameIdent consumes one identifier of a "@layer" name and
// rejects the CSS-wide keywords "initial", "inherit", and "unset", which are
// not allowed as layer names.
func (p *parser) expectValidLayerNameIdent() (string, bool) {
	r := p.current().Range
	text := p.decoded()
	if !p.expect(css_lexer.TIdent) {
		return "", false
	}
	switch text {
	case "initial", "inherit", "unset":
		p.log.Add(logger.Warning, &p.tracker, r, fmt.Sprintf("%q cannot be used as a layer name", text))
		p.prevError = r.Loc
		return "", false
	}
	return text, true
}

// convertTokens converts a complete slice of lexer tokens into AST tokens
// using the default conversion options.
func (p *parser) convertTokens(tokens []css_lexer.Token) []css_ast.Token {
	result, _ := p.convertTokensHelper(tokens, css_lexer.TEndOfFile, convertTokensOpts{})
	return result
}

// convertTokensOpts adjusts how convertTokensHelper treats its input.
type convertTokensOpts struct {
	allowImports         bool // When false, URL tokens create import records flagged as unused
	verbatimWhitespace   bool // When true, whitespace is preserved exactly (needed for CSS variables)
	isInsideCalcFunction bool // When true, warn about "+"/"-" usage that breaks "calc()"
}

// convertTokensHelper converts lexer tokens into AST tokens until the "close"
// token kind is consumed, recursing into parenthesized/braced/bracketed groups.
// It returns the converted tokens plus the remaining unconsumed lexer tokens.
func (p *parser) convertTokensHelper(tokens []css_lexer.Token, close css_lexer.T, opts convertTokensOpts) ([]css_ast.Token, []css_lexer.Token) {
	var result []css_ast.Token
	var nextWhitespace css_ast.WhitespaceFlags

	// Enable verbatim whitespace mode when the first two non-whitespace tokens
	// are a CSS variable name followed by a colon. This is because it could be
	// a form of CSS variable usage, and removing whitespace could potentially
	// break this usage.
	// For example, the following CSS is ignored by Chrome if
	// the whitespace isn't preserved:
	//
	//   @supports (--foo: ) {
	//     html { background: green; }
	//   }
	//
	// Strangely whitespace removal doesn't cause the declaration to be ignored
	// in Firefox or Safari, so there's definitely a browser bug somewhere.
	if !opts.verbatimWhitespace {
		// Only the first non-whitespace token is inspected; the inner loop then
		// checks whether the token right after it is a colon
		for i, t := range tokens {
			if t.Kind == css_lexer.TWhitespace {
				continue
			}
			if t.Kind == css_lexer.TIdent && strings.HasPrefix(t.DecodedText(p.source.Contents), "--") {
				for _, t := range tokens[i+1:] {
					if t.Kind == css_lexer.TWhitespace {
						continue
					}
					if t.Kind == css_lexer.TColon {
						opts.verbatimWhitespace = true
					}
					break
				}
			}
			break
		}
	}

loop:
	for len(tokens) > 0 {
		t := tokens[0]
		tokens = tokens[1:]
		if t.Kind == close {
			break loop
		}
		token := css_ast.Token{
			Kind:       t.Kind,
			Text:       t.DecodedText(p.source.Contents),
			Whitespace: nextWhitespace,
		}
		nextWhitespace = 0

		// Warn about invalid "+" and "-" operators that break the containing "calc()"
		if opts.isInsideCalcFunction && t.Kind.IsNumeric() && len(result) > 0 && result[len(result)-1].Kind.IsNumeric() &&
			(strings.HasPrefix(token.Text, "+") || strings.HasPrefix(token.Text, "-")) {
			// "calc(1+2)" and "calc(1-2)" are invalid
			p.log.Add(logger.Warning, &p.tracker, logger.Range{Loc: t.Range.Loc, Len: 1},
				fmt.Sprintf("The %q operator only works if there is whitespace on both sides", token.Text[:1]))
		}

		switch t.Kind {
		case css_lexer.TWhitespace:
			// Whitespace is represented as flags on neighboring tokens, not as a token
			if last := len(result) - 1; last >= 0 {
				result[last].Whitespace |= css_ast.WhitespaceAfter
			}
			nextWhitespace = css_ast.WhitespaceBefore
			continue

		case css_lexer.TDelimPlus, css_lexer.TDelimMinus:
			// Warn about invalid "+" and "-" operators that break the containing "calc()"
			if opts.isInsideCalcFunction && len(tokens) > 0 {
				if len(result) == 0 || result[len(result)-1].Kind == css_lexer.TComma {
					// "calc(-(1 + 2))" is invalid
					p.log.Add(logger.Warning, &p.tracker, t.Range,
						fmt.Sprintf("%q can only be used as an infix operator, not a prefix operator", token.Text))
				} else if token.Whitespace != css_ast.WhitespaceBefore || tokens[0].Kind != css_lexer.TWhitespace {
					// "calc(1- 2)" and "calc(1 -(2))" are invalid
					p.log.Add(logger.Warning, &p.tracker, t.Range,
						fmt.Sprintf("The %q operator only works if there is whitespace on both sides", token.Text))
				}
			}

		case css_lexer.TNumber:
			if p.options.MinifySyntax {
				if text, ok := mangleNumber(token.Text); ok {
					token.Text = text
				}
			}

		case css_lexer.TPercentage:
			if p.options.MinifySyntax {
				if text, ok := mangleNumber(token.PercentageValue()); ok {
					token.Text = text + "%"
				}
			}

		case css_lexer.TDimension:
			token.UnitOffset = t.UnitOffset
			if p.options.MinifySyntax {
				if text, ok := mangleNumber(token.DimensionValue()); ok {
					token.Text = text + token.DimensionUnit()
					token.UnitOffset = uint16(len(text))
				}
				if value, unit, ok := mangleDimension(token.DimensionValue(), token.DimensionUnit()); ok {
					token.Text = value + unit
					token.UnitOffset = uint16(len(value))
				}
			}

		case css_lexer.TURL:
			// URLs become import records; the text lives in the record, not the token
			token.ImportRecordIndex = uint32(len(p.importRecords))
			var flags ast.ImportRecordFlags
			if !opts.allowImports {
				flags |= ast.IsUnused
			}
			p.importRecords = append(p.importRecords, ast.ImportRecord{
				Kind:  ast.ImportURL,
				Path:  logger.Path{Text: token.Text},
				Range: t.Range,
				Flags: flags,
			})
			token.Text = ""

		case css_lexer.TFunction:
			var nested []css_ast.Token
			original := tokens
			nestedOpts := opts
			if token.Text == "var" {
				// CSS variables require verbatim whitespace for correctness
				nestedOpts.verbatimWhitespace = true
			}
			if token.Text == "calc" {
				nestedOpts.isInsideCalcFunction = true
			}
			nested, tokens = p.convertTokensHelper(tokens, css_lexer.TCloseParen, nestedOpts)
			token.Children = &nested

			// Apply "calc" simplification rules when minifying
			if p.options.MinifySyntax && token.Text == "calc" {
				token = p.tryToReduceCalcExpression(token)
			}

			// Treat a URL function call with a string just like a URL token
			if token.Text == "url" && len(nested) == 1 && nested[0].Kind == css_lexer.TString {
				token.Kind = css_lexer.TURL
				token.Text = ""
				token.Children = nil
				token.ImportRecordIndex = uint32(len(p.importRecords))
				var flags ast.ImportRecordFlags
				if !opts.allowImports {
					flags |= ast.IsUnused
				}
				p.importRecords = append(p.importRecords, ast.ImportRecord{
					Kind:  ast.ImportURL,
					Path:  logger.Path{Text: nested[0].Text},
					Range: original[0].Range,
					Flags: flags,
				})
			}

		case css_lexer.TOpenParen:
			var nested []css_ast.Token
			nested, tokens = p.convertTokensHelper(tokens, css_lexer.TCloseParen, opts)
			token.Children = &nested

		case css_lexer.TOpenBrace:
			var nested []css_ast.Token
			nested, tokens = p.convertTokensHelper(tokens, css_lexer.TCloseBrace, opts)

			// Pretty-printing: insert leading and trailing whitespace when not minifying
			if !opts.verbatimWhitespace && !p.options.MinifyWhitespace && len(nested) > 0 {
				nested[0].Whitespace |= css_ast.WhitespaceBefore
				nested[len(nested)-1].Whitespace |= css_ast.WhitespaceAfter
			}
			token.Children = &nested

		case css_lexer.TOpenBracket:
			var nested []css_ast.Token
			nested, tokens = p.convertTokensHelper(tokens, css_lexer.TCloseBracket, opts)
			token.Children = &nested
		}

		result = append(result, token)
	}

	if !opts.verbatimWhitespace {
		for i := range result {
			token := &result[i]

			// Always remove leading and trailing whitespace
			if i == 0 {
				token.Whitespace &= ^css_ast.WhitespaceBefore
			}
			if i+1 == len(result) {
				token.Whitespace &= ^css_ast.WhitespaceAfter
			}

			switch token.Kind {
			case css_lexer.TComma:
				// Assume that whitespace can always be removed before a comma
				token.Whitespace &= ^css_ast.WhitespaceBefore
				if i > 0 {
					result[i-1].Whitespace &= ^css_ast.WhitespaceAfter
				}

				// Assume whitespace can always be added after a comma
				if p.options.MinifyWhitespace {
					token.Whitespace &= ^css_ast.WhitespaceAfter
					if i+1 < len(result) {
						result[i+1].Whitespace &= ^css_ast.WhitespaceBefore
					}
				} else {
					token.Whitespace |= css_ast.WhitespaceAfter
					if i+1 < len(result) {
						result[i+1].Whitespace |= css_ast.WhitespaceBefore
					}
				}
			}
		}
	}

	// Insert an explicit whitespace token if we're in verbatim mode and all
	// tokens were
	// whitespace. In this case there is no token to attach the
	// whitespace before/after flags so this is the only way to represent this.
	// This is the only case where this function generates an explicit whitespace
	// token. It represents whitespace as flags in all other cases.
	if opts.verbatimWhitespace && len(result) == 0 && nextWhitespace == css_ast.WhitespaceBefore {
		result = append(result, css_ast.Token{
			Kind: css_lexer.TWhitespace,
		})
	}

	return result, tokens
}

// shiftDot multiplies a decimal number string by 10^dotOffset by moving the
// decimal point, normalizing away redundant leading/trailing zeros. Returns
// false when the input uses exponent notation, which is not handled.
func shiftDot(text string, dotOffset int) (string, bool) {
	// This doesn't handle numbers with exponents
	if strings.ContainsAny(text, "eE") {
		return "", false
	}

	// Handle a leading sign
	sign := ""
	if len(text) > 0 && (text[0] == '-' || text[0] == '+') {
		sign = text[:1]
		text = text[1:]
	}

	// Remove the dot
	dot := strings.IndexByte(text, '.')
	if dot == -1 {
		dot = len(text)
	} else {
		text = text[:dot] + text[dot+1:]
	}

	// Move the dot
	dot += dotOffset

	// Remove any leading zeros before the dot
	for len(text) > 0 && dot > 0 && text[0] == '0' {
		text = text[1:]
		dot--
	}

	// Remove any trailing zeros after the dot
	for len(text) > 0 && len(text) > dot && text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}

	// Does this number have no fractional component?
	if dot >= len(text) {
		trailingZeros := strings.Repeat("0", dot-len(text))
		return fmt.Sprintf("%s%s%s", sign, text, trailingZeros), true
	}

	// Potentially add leading zeros
	if dot < 0 {
		text = strings.Repeat("0", -dot) + text
		dot = 0
	}

	// Insert the dot again
	return fmt.Sprintf("%s%s.%s", sign, text[:dot], text[dot:]), true
}

// mangleDimension converts between "ms" and "s" time units when the converted
// form (value plus unit) is strictly shorter. Returns false when no shorter
// form exists.
func mangleDimension(value string, unit string) (string, string, bool) {
	const msLen = 2
	const sLen = 1

	// Mangle times: https://developer.mozilla.org/en-US/docs/Web/CSS/time
	if strings.EqualFold(unit, "ms") {
		if shifted, ok := shiftDot(value, -3); ok && len(shifted)+sLen < len(value)+msLen {
			// Convert "ms" to "s" if shorter
			return shifted, "s", true
		}
	}
	if strings.EqualFold(unit, "s") {
		if shifted, ok := shiftDot(value, 3); ok && len(shifted)+msLen < len(value)+sLen {
			// Convert "s" to "ms" if shorter
			return shifted, "ms", true
		}
	}
	return "", "", false
}

// mangleNumber shortens a decimal number string (e.g. "0.50" => ".5") and
// reports whether anything changed.
func mangleNumber(t string) (string, bool) {
	original := t

	if dot := strings.IndexByte(t, '.'); dot != -1 {
		// Remove trailing zeros
		for len(t) > 0 && t[len(t)-1] == '0' {
			t = t[:len(t)-1]
		}

		// Remove the decimal point if it's unnecessary
		if dot+1 == len(t) {
			t = t[:dot]
			if t == "" || t == "+" || t == "-" {
				t += "0"
			}
		} else {
			// Remove a leading zero
			if len(t) >= 3 && t[0] == '0' && t[1] == '.' && t[2] >= '0' && t[2] <= '9' {
				t = t[1:]
			} else if len(t) >= 4 && (t[0] == '+' || t[0] == '-') && t[1] == '0' && t[2] == '.'
&& t[3] >= '0' && t[3] <= '9' {
				t = t[0:1] + t[2:]
			}
		}
	}

	return t, t != original
}

// parseSelectorRuleFrom re-parses the tokens starting at preludeStart as a
// selector list followed by a declaration block. When the prelude is not a
// valid selector list, it falls back to a generic qualified rule.
func (p *parser) parseSelectorRuleFrom(preludeStart int, opts parseSelectorOpts) css_ast.Rule {
	// Try parsing the prelude as a selector list
	if list, ok := p.parseSelectorList(opts); ok {
		selector := css_ast.RSelector{
			Selectors: list,
			HasAtNest: opts.atNestRange.Len != 0,
		}
		if p.expect(css_lexer.TOpenBrace) {
			selector.Rules = p.parseListOfDeclarations()
			p.expect(css_lexer.TCloseBrace)

			// Minify "@nest" when possible: the prefix is redundant when every
			// complex selector already starts with "&"
			if p.options.MinifySyntax && selector.HasAtNest {
				allHaveNestPrefix := true
				for _, complex := range selector.Selectors {
					if len(complex.Selectors) == 0 || complex.Selectors[0].NestingSelector != css_ast.NestingSelectorPrefix {
						allHaveNestPrefix = false
						break
					}
				}
				if allHaveNestPrefix {
					selector.HasAtNest = false
				}
			}

			return css_ast.Rule{Loc: p.tokens[preludeStart].Range.Loc, Data: &selector}
		}
	}

	// Otherwise, parse a generic qualified rule
	return p.parseQualifiedRuleFrom(preludeStart, true /* isAlreadyInvalid */)
}

// parseQualifiedRuleFrom consumes the rest of a qualified rule (prelude plus
// optional "{}" declaration block) starting at preludeStart. When
// isAlreadyInvalid is true, a missing block is not reported a second time.
func (p *parser) parseQualifiedRuleFrom(preludeStart int, isAlreadyInvalid bool) css_ast.Rule {
	preludeLoc := p.tokens[preludeStart].Range.Loc
loop:
	for {
		switch p.current().Kind {
		case css_lexer.TOpenBrace, css_lexer.TEndOfFile:
			break loop
		default:
			p.parseComponentValue()
		}
	}
	qualified := css_ast.RQualified{
		Prelude: p.convertTokens(p.tokens[preludeStart:p.index]),
	}
	if p.eat(css_lexer.TOpenBrace) {
		qualified.Rules = p.parseListOfDeclarations()
		p.expect(css_lexer.TCloseBrace)
	} else if !isAlreadyInvalid {
		p.expect(css_lexer.TOpenBrace)
	}
	return css_ast.Rule{Loc: preludeLoc, Data: &qualified}
}

// parseDeclaration parses one "key: value" declaration, handling "!important",
// CSS custom properties (which need verbatim whitespace), and typo suggestions
// for unknown property names. Invalid declarations become RBadDeclaration.
func (p *parser) parseDeclaration() css_ast.Rule {
	// Parse the key
	keyStart := p.index
	keyLoc := p.tokens[keyStart].Range.Loc
	ok := false
	if p.expect(css_lexer.TIdent) {
		p.eat(css_lexer.TWhitespace)
		if p.expect(css_lexer.TColon) {
			ok = true
		}
	}

	// Parse the value
	valueStart := p.index
stop:
	for {
		switch p.current().Kind {
		case css_lexer.TEndOfFile, css_lexer.TSemicolon, css_lexer.TCloseBrace:
			break stop
		default:
			p.parseComponentValue()
		}
	}

	// Stop now if this is not a valid declaration
	if !ok {
		return css_ast.Rule{Loc: keyLoc, Data: &css_ast.RBadDeclaration{
			Tokens: p.convertTokens(p.tokens[keyStart:p.index]),
		}}
	}

	keyToken := p.tokens[keyStart]
	keyText := keyToken.DecodedText(p.source.Contents)
	value := p.tokens[valueStart:p.index]
	verbatimWhitespace := strings.HasPrefix(keyText, "--")

	// Remove trailing "!important" (scanning backward over optional whitespace)
	important := false
	i := len(value) - 1
	if i >= 0 && value[i].Kind == css_lexer.TWhitespace {
		i--
	}
	if i >= 0 && value[i].Kind == css_lexer.TIdent && strings.EqualFold(value[i].DecodedText(p.source.Contents), "important") {
		i--
		if i >= 0 && value[i].Kind == css_lexer.TWhitespace {
			i--
		}
		if i >= 0 && value[i].Kind == css_lexer.TDelimExclamation {
			value = value[:i]
			important = true
		}
	}

	result, _ := p.convertTokensHelper(value, css_lexer.TEndOfFile, convertTokensOpts{
		allowImports: true,

		// CSS variables require verbatim whitespace for correctness
		verbatimWhitespace: verbatimWhitespace,
	})

	// Insert or remove whitespace before the first token
	if !verbatimWhitespace && len(result) > 0 {
		if p.options.MinifyWhitespace {
			result[0].Whitespace &= ^css_ast.WhitespaceBefore
		} else {
			result[0].Whitespace |= css_ast.WhitespaceBefore
		}
	}

	key := css_ast.KnownDeclarations[keyText]

	// Attempt to point out trivial typos
	if key == css_ast.DUnknown {
		if corrected, ok := css_ast.MaybeCorrectDeclarationTypo(keyText); ok {
			data := p.tracker.MsgData(keyToken.Range, fmt.Sprintf("%q is not a known CSS property", keyText))
			data.Location.Suggestion = corrected
			p.log.AddMsg(logger.Msg{Kind: logger.Warning, Data: data,
				Notes: []logger.MsgData{{Text: fmt.Sprintf("Did you mean %q instead?", corrected)}}})
		}
	}

	return css_ast.Rule{Loc: keyLoc, Data: &css_ast.RDeclaration{
		Key:       key,
		KeyText:   keyText,
		KeyRange:  keyToken.Range,
		Value:     result,
		Important: important,
	}}
}

// parseComponentValue consumes exactly one component value: a function call,
// a parenthesized/braced/bracketed block, or a single token.
func (p *parser) parseComponentValue() {
	switch p.current().Kind {
	case
css_lexer.TFunction:
		p.parseBlock(css_lexer.TFunction, css_lexer.TCloseParen)

	case css_lexer.TOpenParen:
		p.parseBlock(css_lexer.TOpenParen, css_lexer.TCloseParen)

	case css_lexer.TOpenBrace:
		p.parseBlock(css_lexer.TOpenBrace, css_lexer.TCloseBrace)

	case css_lexer.TOpenBracket:
		p.parseBlock(css_lexer.TOpenBracket, css_lexer.TCloseBracket)

	case css_lexer.TEndOfFile:
		p.unexpected()

	default:
		p.advance()
	}
}

// parseBlock consumes a balanced block that starts with "open" and ends with
// "close", recursing through nested component values. Reports an error when
// the file ends before the block is closed.
func (p *parser) parseBlock(open css_lexer.T, close css_lexer.T) {
	if p.expect(open) {
		for !p.eat(close) {
			if p.peek(css_lexer.TEndOfFile) {
				p.expect(close)
				return
			}
			p.parseComponentValue()
		}
	}
}


================================================
FILE: lib/esbuild/css_parser/css_parser_selector.go
================================================
package css_parser

import (
	"fmt"

	"github.com/withastro/compiler/lib/esbuild/compat"
	"github.com/withastro/compiler/lib/esbuild/css_ast"
	"github.com/withastro/compiler/lib/esbuild/css_lexer"
	"github.com/withastro/compiler/lib/esbuild/logger"
)

// parseSelectorList parses a comma-separated list of complex selectors and
// warns when a nested rule mixes "&"-prefixed and unprefixed selectors.
func (p *parser) parseSelectorList(opts parseSelectorOpts) (list []css_ast.ComplexSelector, ok bool) {
	// Parse the first selector
	firstRange := p.current().Range
	sel, good, firstHasNestPrefix := p.parseComplexSelector(opts)
	if !good {
		return
	}
	list = append(list, sel)

	// Parse the remaining selectors
	for {
		p.eat(css_lexer.TWhitespace)
		if !p.eat(css_lexer.TComma) {
			break
		}
		p.eat(css_lexer.TWhitespace)
		loc := p.current().Range.Loc
		sel, good, hasNestPrefix := p.parseComplexSelector(opts)
		if !good {
			return
		}
		list = append(list, sel)

		// Validate nest prefix consistency (outside of "@nest", where the
		// atNestRange check doesn't apply)
		if firstHasNestPrefix && !hasNestPrefix && opts.atNestRange.Len == 0 {
			data := p.tracker.MsgData(logger.Range{Loc: loc}, "Every selector in a nested style rule must start with \"&\"")
			data.Location.Suggestion = "&"
			p.log.AddMsg(logger.Msg{
				Kind:  logger.Warning,
				Data:  data,
				Notes: []logger.MsgData{p.tracker.MsgData(firstRange, "This is a nested style rule because of the \"&\" here:")},
			})
		}
	}

	ok = true
	return
}

// parseSelectorOpts configures selector parsing for nesting contexts.
type parseSelectorOpts struct {
	atNestRange  logger.Range // Range of the "@nest" token, when inside "@nest" (Len == 0 otherwise)
	allowNesting bool         // Whether "&" nesting syntax is allowed here
}

// parseComplexSelector parses one complex selector (compound selectors joined
// by optional combinators). hasNestPrefix reports whether the selector starts
// with "&"; inside "@nest" it also warns when no "&" appears anywhere.
func (p *parser) parseComplexSelector(opts parseSelectorOpts) (result css_ast.ComplexSelector, ok bool, hasNestPrefix bool) {
	// Parent
	loc := p.current().Range.Loc
	sel, good := p.parseCompoundSelector(opts)
	if !good {
		return
	}
	hasNestPrefix = sel.NestingSelector == css_ast.NestingSelectorPrefix
	isNestContaining := sel.NestingSelector != css_ast.NestingSelectorNone
	result.Selectors = append(result.Selectors, sel)

	for {
		p.eat(css_lexer.TWhitespace)
		if p.peek(css_lexer.TEndOfFile) || p.peek(css_lexer.TComma) || p.peek(css_lexer.TOpenBrace) {
			break
		}

		// Optional combinator
		combinator := p.parseCombinator()
		if combinator != "" {
			p.eat(css_lexer.TWhitespace)
		}

		// Child
		sel, good := p.parseCompoundSelector(opts)
		if !good {
			return
		}
		sel.Combinator = combinator
		result.Selectors = append(result.Selectors, sel)
		if sel.NestingSelector != css_ast.NestingSelectorNone {
			isNestContaining = true
		}
	}

	// Validate nest selector consistency
	if opts.atNestRange.Len != 0 && !isNestContaining {
		p.log.AddWithNotes(logger.Warning, &p.tracker, logger.Range{Loc: loc},
			"Every selector in a nested style rule must contain \"&\"",
			[]logger.MsgData{p.tracker.MsgData(opts.atNestRange, "This is a nested style rule because of the \"@nest\" here:")})
	}

	ok = true
	return
}

// nameToken captures the current token's kind and decoded text without
// consuming it.
func (p *parser) nameToken() css_ast.NameToken {
	return css_ast.NameToken{
		Kind: p.current().Kind,
		Text: p.decoded(),
	}
}

// maybeWarnAboutNesting warns when "&" nesting is used in a context that
// doesn't allow it, or when the configured target environment doesn't
// support CSS nesting.
func (p *parser) maybeWarnAboutNesting(r logger.Range, opts parseSelectorOpts) {
	if !opts.allowNesting {
		p.log.Add(logger.Warning, &p.tracker, r, "CSS nesting syntax cannot be used outside of a style rule")
	} else if p.options.UnsupportedCSSFeatures.Has(compat.Nesting) {
		text := "CSS nesting syntax is not supported in the configured target environment"
		if p.options.OriginalTargetEnv != "" {
			text = fmt.Sprintf("%s (%s)", text, p.options.OriginalTargetEnv)
		}
		p.log.Add(logger.Warning, &p.tracker, r, text)
	}
}

func (p *parser)
parseCompoundSelector(opts parseSelectorOpts) (sel css_ast.CompoundSelector, ok bool) {
	// This is an extension: https://drafts.csswg.org/css-nesting-1/
	r := p.current().Range
	if p.eat(css_lexer.TDelimAmpersand) {
		sel.NestingSelector = css_ast.NestingSelectorPrefix
		p.maybeWarnAboutNesting(r, opts)
	}

	// Parse the type selector (optionally namespaced, e.g. "ns|div" or "*")
	switch p.current().Kind {
	case css_lexer.TDelimBar, css_lexer.TIdent, css_lexer.TDelimAsterisk:
		nsName := css_ast.NamespacedName{}
		if !p.peek(css_lexer.TDelimBar) {
			nsName.Name = p.nameToken()
			p.advance()
		} else {
			// Hack: Create an empty "identifier" to represent this
			nsName.Name.Kind = css_lexer.TIdent
		}
		if p.eat(css_lexer.TDelimBar) {
			if !p.peek(css_lexer.TIdent) && !p.peek(css_lexer.TDelimAsterisk) {
				p.expect(css_lexer.TIdent)
				return
			}
			prefix := nsName.Name
			nsName.NamespacePrefix = &prefix
			nsName.Name = p.nameToken()
			p.advance()
		}
		sel.TypeSelector = &nsName
	}

	// Parse the subclass selectors (#id, .class, [attr], :pseudo)
subclassSelectors:
	for {
		switch p.current().Kind {
		case css_lexer.THash:
			// Only hash tokens flagged as valid IDs count as "#id" selectors
			if (p.current().Flags & css_lexer.IsID) == 0 {
				break subclassSelectors
			}
			name := p.decoded()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSHash{Name: name})
			p.advance()

		case css_lexer.TDelimDot:
			p.advance()
			name := p.decoded()
			sel.SubclassSelectors = append(sel.SubclassSelectors, &css_ast.SSClass{Name: name})
			p.expect(css_lexer.TIdent)

		case css_lexer.TOpenBracket:
			p.advance()
			attr, good := p.parseAttributeSelector()
			if !good {
				return
			}
			sel.SubclassSelectors = append(sel.SubclassSelectors, &attr)

		case css_lexer.TColon:
			if p.next().Kind == css_lexer.TColon {
				// Special-case the start of the pseudo-element selector section
				for p.current().Kind == css_lexer.TColon {
					isElement := p.next().Kind == css_lexer.TColon
					if isElement {
						p.advance()
					}
					pseudo := p.parsePseudoClassSelector()

					// https://www.w3.org/TR/selectors-4/#single-colon-pseudos
					// The four Level 2 pseudo-elements (::before, ::after, ::first-line,
					// and ::first-letter) may, for legacy reasons, be represented
using // the <pseudo-class-selector> grammar, with only a single ":" // character at their start. if p.options.MinifySyntax && isElement && len(pseudo.Args) == 0 { switch pseudo.Name { case "before", "after", "first-line", "first-letter": isElement = false } } pseudo.IsElement = isElement sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo) } break subclassSelectors } pseudo := p.parsePseudoClassSelector() sel.SubclassSelectors = append(sel.SubclassSelectors, &pseudo) case css_lexer.TDelimAmpersand: // This is an extension: https://drafts.csswg.org/css-nesting-1/ r := p.current().Range p.advance() if sel.NestingSelector == css_ast.NestingSelectorNone { sel.NestingSelector = css_ast.NestingSelectorPresentButNotPrefix p.maybeWarnAboutNesting(r, opts) } default: break subclassSelectors } } // The compound selector must be non-empty if sel.NestingSelector == css_ast.NestingSelectorNone && sel.TypeSelector == nil && len(sel.SubclassSelectors) == 0 { p.unexpected() return } ok = true return } func (p *parser) parseAttributeSelector() (attr css_ast.SSAttribute, ok bool) { // Parse the namespaced name switch p.current().Kind { case css_lexer.TDelimBar, css_lexer.TDelimAsterisk: // "[|x]" // "[*|x]" if p.peek(css_lexer.TDelimAsterisk) { prefix := p.nameToken() p.advance() attr.NamespacedName.NamespacePrefix = &prefix } else { // "[|attr]" is equivalent to "[attr]". From the specification: // "In keeping with the Namespaces in the XML recommendation, default // namespaces do not apply to attributes, therefore attribute selectors // without a namespace component apply only to attributes that have no // namespace (equivalent to |attr)." 
} if !p.expect(css_lexer.TDelimBar) { return } attr.NamespacedName.Name = p.nameToken() if !p.expect(css_lexer.TIdent) { return } default: // "[x]" // "[x|y]" attr.NamespacedName.Name = p.nameToken() if !p.expect(css_lexer.TIdent) { return } if p.next().Kind != css_lexer.TDelimEquals && p.eat(css_lexer.TDelimBar) { prefix := attr.NamespacedName.Name attr.NamespacedName.NamespacePrefix = &prefix attr.NamespacedName.Name = p.nameToken() if !p.expect(css_lexer.TIdent) { return } } } // Parse the optional matcher operator p.eat(css_lexer.TWhitespace) if p.eat(css_lexer.TDelimEquals) { attr.MatcherOp = "=" } else { switch p.current().Kind { case css_lexer.TDelimTilde: attr.MatcherOp = "~=" case css_lexer.TDelimBar: attr.MatcherOp = "|=" case css_lexer.TDelimCaret: attr.MatcherOp = "^=" case css_lexer.TDelimDollar: attr.MatcherOp = "$=" case css_lexer.TDelimAsterisk: attr.MatcherOp = "*=" } if attr.MatcherOp != "" { p.advance() p.expect(css_lexer.TDelimEquals) } } // Parse the optional matcher value if attr.MatcherOp != "" { p.eat(css_lexer.TWhitespace) if !p.peek(css_lexer.TString) && !p.peek(css_lexer.TIdent) { p.unexpected() } attr.MatcherValue = p.decoded() p.advance() p.eat(css_lexer.TWhitespace) if p.peek(css_lexer.TIdent) { if modifier := p.decoded(); len(modifier) == 1 { if c := modifier[0]; c == 'i' || c == 'I' || c == 's' || c == 'S' { attr.MatcherModifier = c p.advance() } } } } p.expect(css_lexer.TCloseBracket) ok = true return } func (p *parser) parsePseudoClassSelector() css_ast.SSPseudoClass { p.advance() if p.peek(css_lexer.TFunction) { text := p.decoded() p.advance() args := p.convertTokens(p.parseAnyValue()) p.expect(css_lexer.TCloseParen) return css_ast.SSPseudoClass{Name: text, Args: args} } name := p.decoded() sel := css_ast.SSPseudoClass{} if p.expect(css_lexer.TIdent) { sel.Name = name } return sel } func (p *parser) parseAnyValue() []css_lexer.Token { // Reference: https://drafts.csswg.org/css-syntax-3/#typedef-declaration-value p.stack = 
p.stack[:0] // Reuse allocated memory start := p.index loop: for { switch p.current().Kind { case css_lexer.TCloseParen, css_lexer.TCloseBracket, css_lexer.TCloseBrace: last := len(p.stack) - 1 if last < 0 || !p.peek(p.stack[last]) { break loop } p.stack = p.stack[:last] case css_lexer.TSemicolon, css_lexer.TDelimExclamation: if len(p.stack) == 0 { break loop } case css_lexer.TOpenParen, css_lexer.TFunction: p.stack = append(p.stack, css_lexer.TCloseParen) case css_lexer.TOpenBracket: p.stack = append(p.stack, css_lexer.TCloseBracket) case css_lexer.TOpenBrace: p.stack = append(p.stack, css_lexer.TCloseBrace) } p.advance() } tokens := p.tokens[start:p.index] if len(tokens) == 0 { p.unexpected() } return tokens } func (p *parser) parseCombinator() string { switch p.current().Kind { case css_lexer.TDelimGreaterThan: p.advance() return ">" case css_lexer.TDelimPlus: p.advance() return "+" case css_lexer.TDelimTilde: p.advance() return "~" default: return "" } } ================================================ FILE: lib/esbuild/css_parser/css_parser_test.go ================================================ package css_parser import ( "fmt" "testing" "github.com/withastro/compiler/lib/esbuild/compat" "github.com/withastro/compiler/lib/esbuild/config" "github.com/withastro/compiler/lib/esbuild/css_printer" "github.com/withastro/compiler/lib/esbuild/logger" "github.com/withastro/compiler/lib/esbuild/test" ) func expectParseError(t *testing.T, contents string, expected string) { t.Helper() t.Run(contents, func(t *testing.T) { t.Helper() log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug) Parse(log, test.SourceForTest(contents), Options{}) msgs := log.Done() text := "" for _, msg := range msgs { text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{}) } test.AssertEqualWithDiff(t, text, expected) }) } func expectPrintedCommon(t *testing.T, name string, contents string, expected string, options config.Options) { t.Helper() t.Run(name, func(t *testing.T) { 
t.Helper()
		log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
		tree := Parse(log, test.SourceForTest(contents), Options{
			MinifySyntax:           options.MinifySyntax,
			MinifyWhitespace:       options.MinifyWhitespace,
			UnsupportedCSSFeatures: options.UnsupportedCSSFeatures,
		})
		msgs := log.Done()
		text := ""
		for _, msg := range msgs {
			// Only error-level messages fail the test; warnings are tolerated.
			if msg.Kind == logger.Error {
				text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{})
			}
		}
		test.AssertEqualWithDiff(t, text, "")
		result := css_printer.Print(tree, css_printer.Options{
			MinifyWhitespace: options.MinifyWhitespace,
		})
		test.AssertEqualWithDiff(t, string(result.CSS), expected)
	})
}

// expectPrinted: default options (no minification, no lowering).
func expectPrinted(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents, contents, expected, config.Options{})
}

// expectPrintedLower: all CSS features marked unsupported, forcing lowering.
// NOTE(review): the subtest name suffix here is " [mangle]", which collides
// with expectPrintedMangle's subtest names; upstream esbuild uses " [lower]".
// Confirm whether this divergence is intentional.
func expectPrintedLower(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents+" [mangle]", contents, expected, config.Options{
		UnsupportedCSSFeatures: ^compat.CSSFeature(0),
	})
}

// expectPrintedMinify: whitespace minification only.
func expectPrintedMinify(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents+" [minify]", contents, expected, config.Options{
		MinifyWhitespace: true,
	})
}

// expectPrintedMangle: syntax minification only.
func expectPrintedMangle(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents+" [mangle]", contents, expected, config.Options{
		MinifySyntax: true,
	})
}

// expectPrintedLowerMangle: lowering plus syntax minification.
// NOTE(review): same " [mangle]" subtest-name collision as expectPrintedLower.
func expectPrintedLowerMangle(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents+" [mangle]", contents, expected, config.Options{
		UnsupportedCSSFeatures: ^compat.CSSFeature(0),
		MinifySyntax:           true,
	})
}

// expectPrintedMangleMinify: syntax plus whitespace minification.
func expectPrintedMangleMinify(t *testing.T, contents string, expected string) {
	t.Helper()
	expectPrintedCommon(t, contents+" [mangle, minify]", contents, expected, config.Options{
		MinifySyntax:     true,
		MinifyWhitespace: true,
	})
}

// TestSingleLineComment checks that "//" comments (invalid in CSS) produce a
// warning but are otherwise passed through as selector text.
func TestSingleLineComment(t *testing.T) {
	expectParseError(t, "a, // a\nb // b\n{}",
		"<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n"+
			"<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n")
	expectParseError(t, "a, ///// a /////\n{}",
		"<stdin>: WARNING: Comments in CSS use \"/* ... */\" instead of \"//\"\n")
	expectPrinted(t, "a, // a\nb // b\n{}", "a, // a b // b {\n}\n")
	expectPrinted(t, "a, ///// a /////\n{}", "a, ///// a ///// {\n}\n")
}

// TestEscapes exercises CSS backslash escape decoding and re-encoding for each
// token kind (identifiers, hashes, functions, strings, URLs, at-keywords,
// dimensions) and in each selector position.
func TestEscapes(t *testing.T) {
	// TIdent
	expectPrinted(t, "a { value: id\\65nt }", "a {\n value: ident;\n}\n")
	expectPrinted(t, "a { value: \\69 dent }", "a {\n value: ident;\n}\n")
	expectPrinted(t, "a { value: \\69dent }", "a {\n value: \u69DEnt;\n}\n")
	expectPrinted(t, "a { value: \\2cx }", "a {\n value: \\,x;\n}\n")
	expectPrinted(t, "a { value: \\,x }", "a {\n value: \\,x;\n}\n")
	expectPrinted(t, "a { value: x\\2c }", "a {\n value: x\\,;\n}\n")
	expectPrinted(t, "a { value: x\\, }", "a {\n value: x\\,;\n}\n")
	expectPrinted(t, "a { value: x\\0 }", "a {\n value: x\uFFFD;\n}\n")
	expectPrinted(t, "a { value: x\\1 }", "a {\n value: x\\\x01;\n}\n")
	expectPrinted(t, "a { value: x\x00 }", "a {\n value: x\uFFFD;\n}\n")
	expectPrinted(t, "a { value: x\x01 }", "a {\n value: x\x01;\n}\n")

	// THash
	expectPrinted(t, "a { value: #0h\\61sh }", "a {\n value: #0hash;\n}\n")
	expectPrinted(t, "a { value: #\\30hash }", "a {\n value: #0hash;\n}\n")
	expectPrinted(t, "a { value: #\\2cx }", "a {\n value: #\\,x;\n}\n")
	expectPrinted(t, "a { value: #\\,x }", "a {\n value: #\\,x;\n}\n")

	// THashID
	expectPrinted(t, "a { value: #h\\61sh }", "a {\n value: #hash;\n}\n")
	expectPrinted(t, "a { value: #\\68 ash }", "a {\n value: #hash;\n}\n")
	expectPrinted(t, "a { value: #\\68ash }", "a {\n value: #\u068Ash;\n}\n")
	expectPrinted(t, "a { value: #x\\2c }", "a {\n value: #x\\,;\n}\n")
	expectPrinted(t, "a { value: #x\\, }", "a {\n value: #x\\,;\n}\n")

	// TFunction
	expectPrinted(t, "a { value: f\\6e() }", "a {\n value: fn();\n}\n")
	expectPrinted(t, "a { value: \\66n() }", "a {\n value: fn();\n}\n")
	expectPrinted(t, "a { value: \\2cx() }", "a {\n value: \\,x();\n}\n")
	expectPrinted(t, "a { value: \\,x() }", "a {\n value: \\,x();\n}\n")
	expectPrinted(t, "a { value: x\\2c() }", "a {\n value: x\\,();\n}\n")
	expectPrinted(t, "a { value: x\\,() }", "a {\n value: x\\,();\n}\n")

	// TString
	expectPrinted(t, "a { value: 'a\\62 c' }", "a {\n value: \"abc\";\n}\n")
	expectPrinted(t, "a { value: 'a\\62c' }", "a {\n value: \"a\u062C\";\n}\n")
	expectPrinted(t, "a { value: '\\61 bc' }", "a {\n value: \"abc\";\n}\n")
	expectPrinted(t, "a { value: '\\61bc' }", "a {\n value: \"\u61BC\";\n}\n")
	expectPrinted(t, "a { value: '\\2c' }", "a {\n value: \",\";\n}\n")
	expectPrinted(t, "a { value: '\\,' }", "a {\n value: \",\";\n}\n")
	expectPrinted(t, "a { value: '\\0' }", "a {\n value: \"\uFFFD\";\n}\n")
	expectPrinted(t, "a { value: '\\1' }", "a {\n value: \"\x01\";\n}\n")
	expectPrinted(t, "a { value: '\x00' }", "a {\n value: \"\uFFFD\";\n}\n")
	expectPrinted(t, "a { value: '\x01' }", "a {\n value: \"\x01\";\n}\n")

	// TURL
	expectPrinted(t, "a { value: url(a\\62 c) }", "a {\n value: url(abc);\n}\n")
	expectPrinted(t, "a { value: url(a\\62c) }", "a {\n value: url(a\u062C);\n}\n")
	expectPrinted(t, "a { value: url(\\61 bc) }", "a {\n value: url(abc);\n}\n")
	expectPrinted(t, "a { value: url(\\61bc) }", "a {\n value: url(\u61BC);\n}\n")
	expectPrinted(t, "a { value: url(\\2c) }", "a {\n value: url(,);\n}\n")
	expectPrinted(t, "a { value: url(\\,) }", "a {\n value: url(,);\n}\n")

	// TAtKeyword
	expectPrinted(t, "a { value: @k\\65yword }", "a {\n value: @keyword;\n}\n")
	expectPrinted(t, "a { value: @\\6b eyword }", "a {\n value: @keyword;\n}\n")
	expectPrinted(t, "a { value: @\\6beyword }", "a {\n value: @\u06BEyword;\n}\n")
	expectPrinted(t, "a { value: @\\2cx }", "a {\n value: @\\,x;\n}\n")
	expectPrinted(t, "a { value: @\\,x }", "a {\n value: @\\,x;\n}\n")
	expectPrinted(t, "a { value: @x\\2c }", "a {\n value: @x\\,;\n}\n")
	expectPrinted(t, "a { value: @x\\, }", "a {\n value: @x\\,;\n}\n")

	// TDimension
	expectPrinted(t, "a { value: 10\\65m }", "a {\n value: 10em;\n}\n")
	expectPrinted(t, "a { value: 10p\\32x }", "a {\n value: 10p2x;\n}\n")
	expectPrinted(t, "a { value: 10e\\32x }", "a {\n value: 10\\65 2x;\n}\n")
	expectPrinted(t, "a { value: 10\\32x }", "a {\n value: 10\\32x;\n}\n")
	expectPrinted(t, "a { value: 10\\2cx }", "a {\n value: 10\\,x;\n}\n")
	expectPrinted(t, "a { value: 10\\,x }", "a {\n value: 10\\,x;\n}\n")
	expectPrinted(t, "a { value: 10x\\2c }", "a {\n value: 10x\\,;\n}\n")
	expectPrinted(t, "a { value: 10x\\, }", "a {\n value: 10x\\,;\n}\n")

	// RDeclaration
	expectPrintedMangle(t, "a { c\\6flor: #f00 }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { \\63olor: #f00 }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { \\2color: #f00 }", "a {\n \\,olor: #f00;\n}\n")
	expectPrintedMangle(t, "a { \\,olor: #f00 }", "a {\n \\,olor: #f00;\n}\n")

	// RUnknownAt
	expectPrinted(t, "@unknown;", "@unknown;\n")
	expectPrinted(t, "@u\\6eknown;", "@unknown;\n")
	expectPrinted(t, "@\\75nknown;", "@unknown;\n")
	expectPrinted(t, "@u\\2cnknown;", "@u\\,nknown;\n")
	expectPrinted(t, "@u\\,nknown;", "@u\\,nknown;\n")
	expectPrinted(t, "@\\2cunknown;", "@\\,unknown;\n")
	expectPrinted(t, "@\\,unknown;", "@\\,unknown;\n")

	// RAtKeyframes
	expectPrinted(t, "@k\\65yframes abc { from {} }", "@keyframes abc {\n from {\n }\n}\n")
	expectPrinted(t, "@keyframes \\61 bc { from {} }", "@keyframes abc {\n from {\n }\n}\n")
	expectPrinted(t, "@keyframes a\\62 c { from {} }", "@keyframes abc {\n from {\n }\n}\n")
	expectPrinted(t, "@keyframes abc { \\66rom {} }", "@keyframes abc {\n from {\n }\n}\n")
	expectPrinted(t, "@keyframes a\\2c c { \\66rom {} }", "@keyframes a\\,c {\n from {\n }\n}\n")
	expectPrinted(t, "@keyframes a\\,c { \\66rom {} }", "@keyframes a\\,c {\n from {\n }\n}\n")

	// RAtNamespace
	expectPrinted(t, "@n\\61mespace ns 'path';", "@namespace ns \"path\";\n")
	expectPrinted(t, "@namespace \\6es 'path';", "@namespace ns \"path\";\n")
	expectPrinted(t, "@namespace ns 'p\\61th';", "@namespace ns \"path\";\n")
	expectPrinted(t, "@namespace \\2cs 'p\\61th';", "@namespace \\,s \"path\";\n")
	expectPrinted(t, "@namespace \\,s 'p\\61th';", "@namespace \\,s \"path\";\n")

	// CompoundSelector
	expectPrinted(t, "* {}", "* {\n}\n")
	expectPrinted(t, "*|div {}", "*|div {\n}\n")
	expectPrinted(t, "\\2a {}", "\\* {\n}\n")
	expectPrinted(t, "\\2a|div {}", "\\*|div {\n}\n")
	expectPrinted(t, "\\2d {}", "\\- {\n}\n")
	expectPrinted(t, "\\2d- {}", "-- {\n}\n")
	expectPrinted(t, "-\\2d {}", "-- {\n}\n")
	expectPrinted(t, "\\2d 123 {}", "\\-123 {\n}\n")

	// SSHash
	expectPrinted(t, "#h\\61sh {}", "#hash {\n}\n")
	expectPrinted(t, "#\\2chash {}", "#\\,hash {\n}\n")
	expectPrinted(t, "#\\,hash {}", "#\\,hash {\n}\n")
	expectPrinted(t, "#\\2d {}", "#\\- {\n}\n")
	expectPrinted(t, "#\\2d- {}", "#-- {\n}\n")
	expectPrinted(t, "#-\\2d {}", "#-- {\n}\n")
	expectPrinted(t, "#\\2d 123 {}", "#\\-123 {\n}\n")
	expectPrinted(t, "#\\61hash {}", "#ahash {\n}\n")
	expectPrinted(t, "#\\30hash {}", "#\\30hash {\n}\n")
	expectPrinted(t, "#0\\2chash {}", "#0\\,hash {\n}\n")
	expectPrinted(t, "#0\\,hash {}", "#0\\,hash {\n}\n")

	// SSClass
	expectPrinted(t, ".cl\\61ss {}", ".class {\n}\n")
	expectPrinted(t, ".\\2c class {}", ".\\,class {\n}\n")
	expectPrinted(t, ".\\,class {}", ".\\,class {\n}\n")

	// SSPseudoClass
	expectPrinted(t, ":pseudocl\\61ss {}", ":pseudoclass {\n}\n")
	expectPrinted(t, ":pseudo\\2c class {}", ":pseudo\\,class {\n}\n")
	expectPrinted(t, ":pseudo\\,class {}", ":pseudo\\,class {\n}\n")
	expectPrinted(t, ":pseudo(cl\\61ss) {}", ":pseudo(class) {\n}\n")
	expectPrinted(t, ":pseudo(cl\\2css) {}", ":pseudo(cl\\,ss) {\n}\n")
	expectPrinted(t, ":pseudo(cl\\,ss) {}", ":pseudo(cl\\,ss) {\n}\n")

	// SSAttribute
	expectPrinted(t, "[\\61ttr] {}", "[attr] {\n}\n")
	expectPrinted(t, "[\\2c attr] {}", "[\\,attr] {\n}\n")
	expectPrinted(t, "[\\,attr] {}", "[\\,attr] {\n}\n")
	expectPrinted(t, "[attr\\7e=x] {}", "[attr\\~=x] {\n}\n")
	expectPrinted(t, "[attr\\~=x] {}", "[attr\\~=x] {\n}\n")
	expectPrinted(t, "[attr=\\2c] {}", "[attr=\",\"] {\n}\n")
expectPrinted(t, "[attr=\\,] {}", "[attr=\",\"] {\n}\n") expectPrinted(t, "[attr=\"-\"] {}", "[attr=\"-\"] {\n}\n") expectPrinted(t, "[attr=\"--\"] {}", "[attr=--] {\n}\n") expectPrinted(t, "[attr=\"-a\"] {}", "[attr=-a] {\n}\n") expectPrinted(t, "[\\6es|attr] {}", "[ns|attr] {\n}\n") expectPrinted(t, "[ns|\\61ttr] {}", "[ns|attr] {\n}\n") expectPrinted(t, "[\\2cns|attr] {}", "[\\,ns|attr] {\n}\n") expectPrinted(t, "[ns|\\2c attr] {}", "[ns|\\,attr] {\n}\n") expectPrinted(t, "[*|attr] {}", "[*|attr] {\n}\n") expectPrinted(t, "[\\2a|attr] {}", "[\\*|attr] {\n}\n") } func TestString(t *testing.T) { expectPrinted(t, "a:after { content: 'a\\\rb' }", "a:after {\n content: \"ab\";\n}\n") expectPrinted(t, "a:after { content: 'a\\\nb' }", "a:after {\n content: \"ab\";\n}\n") expectPrinted(t, "a:after { content: 'a\\\fb' }", "a:after {\n content: \"ab\";\n}\n") expectPrinted(t, "a:after { content: 'a\\\r\nb' }", "a:after {\n content: \"ab\";\n}\n") expectPrinted(t, "a:after { content: 'a\\62 c' }", "a:after {\n content: \"abc\";\n}\n") expectParseError(t, "a:after { content: '\r' }", `<stdin>: ERROR: Unterminated string token <stdin>: ERROR: Unterminated string token <stdin>: WARNING: Expected "}" but found end of file `) expectParseError(t, "a:after { content: '\n' }", `<stdin>: ERROR: Unterminated string token <stdin>: ERROR: Unterminated string token <stdin>: WARNING: Expected "}" but found end of file `) expectParseError(t, "a:after { content: '\f' }", `<stdin>: ERROR: Unterminated string token <stdin>: ERROR: Unterminated string token <stdin>: WARNING: Expected "}" but found end of file `) expectParseError(t, "a:after { content: '\r\n' }", `<stdin>: ERROR: Unterminated string token <stdin>: ERROR: Unterminated string token <stdin>: WARNING: Expected "}" but found end of file `) expectPrinted(t, "a:after { content: '\\1010101' }", "a:after {\n content: \"\U001010101\";\n}\n") expectPrinted(t, "a:after { content: '\\invalid' }", "a:after {\n content: \"invalid\";\n}\n") 
} func TestNumber(t *testing.T) { for _, ext := range []string{"", "%", "px+"} { expectPrinted(t, "a { width: .0"+ext+"; }", "a {\n width: .0"+ext+";\n}\n") expectPrinted(t, "a { width: .00"+ext+"; }", "a {\n width: .00"+ext+";\n}\n") expectPrinted(t, "a { width: .10"+ext+"; }", "a {\n width: .10"+ext+";\n}\n") expectPrinted(t, "a { width: 0."+ext+"; }", "a {\n width: 0."+ext+";\n}\n") expectPrinted(t, "a { width: 0.0"+ext+"; }", "a {\n width: 0.0"+ext+";\n}\n") expectPrinted(t, "a { width: 0.1"+ext+"; }", "a {\n width: 0.1"+ext+";\n}\n") expectPrinted(t, "a { width: +.0"+ext+"; }", "a {\n width: +.0"+ext+";\n}\n") expectPrinted(t, "a { width: +.00"+ext+"; }", "a {\n width: +.00"+ext+";\n}\n") expectPrinted(t, "a { width: +.10"+ext+"; }", "a {\n width: +.10"+ext+";\n}\n") expectPrinted(t, "a { width: +0."+ext+"; }", "a {\n width: +0."+ext+";\n}\n") expectPrinted(t, "a { width: +0.0"+ext+"; }", "a {\n width: +0.0"+ext+";\n}\n") expectPrinted(t, "a { width: +0.1"+ext+"; }", "a {\n width: +0.1"+ext+";\n}\n") expectPrinted(t, "a { width: -.0"+ext+"; }", "a {\n width: -.0"+ext+";\n}\n") expectPrinted(t, "a { width: -.00"+ext+"; }", "a {\n width: -.00"+ext+";\n}\n") expectPrinted(t, "a { width: -.10"+ext+"; }", "a {\n width: -.10"+ext+";\n}\n") expectPrinted(t, "a { width: -0."+ext+"; }", "a {\n width: -0."+ext+";\n}\n") expectPrinted(t, "a { width: -0.0"+ext+"; }", "a {\n width: -0.0"+ext+";\n}\n") expectPrinted(t, "a { width: -0.1"+ext+"; }", "a {\n width: -0.1"+ext+";\n}\n") expectPrintedMangle(t, "a { width: .0"+ext+"; }", "a {\n width: 0"+ext+";\n}\n") expectPrintedMangle(t, "a { width: .00"+ext+"; }", "a {\n width: 0"+ext+";\n}\n") expectPrintedMangle(t, "a { width: .10"+ext+"; }", "a {\n width: .1"+ext+";\n}\n") expectPrintedMangle(t, "a { width: 0."+ext+"; }", "a {\n width: 0"+ext+";\n}\n") expectPrintedMangle(t, "a { width: 0.0"+ext+"; }", "a {\n width: 0"+ext+";\n}\n") expectPrintedMangle(t, "a { width: 0.1"+ext+"; }", "a {\n width: .1"+ext+";\n}\n") 
expectPrintedMangle(t, "a { width: +.0"+ext+"; }", "a {\n width: +0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: +.00"+ext+"; }", "a {\n width: +0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: +.10"+ext+"; }", "a {\n width: +.1"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: +0."+ext+"; }", "a {\n width: +0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: +0.0"+ext+"; }", "a {\n width: +0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: +0.1"+ext+"; }", "a {\n width: +.1"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -.0"+ext+"; }", "a {\n width: -0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -.00"+ext+"; }", "a {\n width: -0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -.10"+ext+"; }", "a {\n width: -.1"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -0."+ext+"; }", "a {\n width: -0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -0.0"+ext+"; }", "a {\n width: -0"+ext+";\n}\n")
		expectPrintedMangle(t, "a { width: -0.1"+ext+"; }", "a {\n width: -.1"+ext+";\n}\n")
	}
}

// TestHexColor checks that hex colors are preserved verbatim by default, and
// that mangling shortens #RRGGBB/#RRGGBBAA to #RGB/#RGBA only when each byte
// is a doubled nibble, lowercasing shortened output.
func TestHexColor(t *testing.T) {
	// "#RGBA"
	expectPrinted(t, "a { color: #1234 }", "a {\n color: #1234;\n}\n")
	expectPrinted(t, "a { color: #123f }", "a {\n color: #123f;\n}\n")
	expectPrinted(t, "a { color: #abcd }", "a {\n color: #abcd;\n}\n")
	expectPrinted(t, "a { color: #abcf }", "a {\n color: #abcf;\n}\n")
	expectPrinted(t, "a { color: #ABCD }", "a {\n color: #ABCD;\n}\n")
	expectPrinted(t, "a { color: #ABCF }", "a {\n color: #ABCF;\n}\n")

	expectPrintedMangle(t, "a { color: #1234 }", "a {\n color: #1234;\n}\n")
	expectPrintedMangle(t, "a { color: #123f }", "a {\n color: #123;\n}\n")
	expectPrintedMangle(t, "a { color: #abcd }", "a {\n color: #abcd;\n}\n")
	expectPrintedMangle(t, "a { color: #abcf }", "a {\n color: #abc;\n}\n")
	expectPrintedMangle(t, "a { color: #ABCD }", "a {\n color: #abcd;\n}\n")
	expectPrintedMangle(t, "a { color: #ABCF }", "a {\n color: #abc;\n}\n")

	// "#RRGGBB"
	expectPrinted(t, "a { color: #112233 }", "a {\n color: #112233;\n}\n")
	expectPrinted(t, "a { color: #122233 }", "a {\n color: #122233;\n}\n")
	expectPrinted(t, "a { color: #112333 }", "a {\n color: #112333;\n}\n")
	expectPrinted(t, "a { color: #112234 }", "a {\n color: #112234;\n}\n")

	expectPrintedMangle(t, "a { color: #112233 }", "a {\n color: #123;\n}\n")
	expectPrintedMangle(t, "a { color: #122233 }", "a {\n color: #122233;\n}\n")
	expectPrintedMangle(t, "a { color: #112333 }", "a {\n color: #112333;\n}\n")
	expectPrintedMangle(t, "a { color: #112234 }", "a {\n color: #112234;\n}\n")

	expectPrinted(t, "a { color: #aabbcc }", "a {\n color: #aabbcc;\n}\n")
	expectPrinted(t, "a { color: #abbbcc }", "a {\n color: #abbbcc;\n}\n")
	expectPrinted(t, "a { color: #aabccc }", "a {\n color: #aabccc;\n}\n")
	expectPrinted(t, "a { color: #aabbcd }", "a {\n color: #aabbcd;\n}\n")

	expectPrintedMangle(t, "a { color: #aabbcc }", "a {\n color: #abc;\n}\n")
	expectPrintedMangle(t, "a { color: #abbbcc }", "a {\n color: #abbbcc;\n}\n")
	expectPrintedMangle(t, "a { color: #aabccc }", "a {\n color: #aabccc;\n}\n")
	expectPrintedMangle(t, "a { color: #aabbcd }", "a {\n color: #aabbcd;\n}\n")

	expectPrinted(t, "a { color: #AABBCC }", "a {\n color: #AABBCC;\n}\n")
	expectPrinted(t, "a { color: #ABBBCC }", "a {\n color: #ABBBCC;\n}\n")
	expectPrinted(t, "a { color: #AABCCC }", "a {\n color: #AABCCC;\n}\n")
	expectPrinted(t, "a { color: #AABBCD }", "a {\n color: #AABBCD;\n}\n")

	expectPrintedMangle(t, "a { color: #AABBCC }", "a {\n color: #abc;\n}\n")
	expectPrintedMangle(t, "a { color: #ABBBCC }", "a {\n color: #abbbcc;\n}\n")
	expectPrintedMangle(t, "a { color: #AABCCC }", "a {\n color: #aabccc;\n}\n")
	expectPrintedMangle(t, "a { color: #AABBCD }", "a {\n color: #aabbcd;\n}\n")

	// "#RRGGBBAA"
	expectPrinted(t, "a { color: #11223344 }", "a {\n color: #11223344;\n}\n")
	expectPrinted(t, "a { color: #12223344 }", "a {\n color: #12223344;\n}\n")
	expectPrinted(t, "a { color: #11233344 }", "a {\n color: #11233344;\n}\n")
	expectPrinted(t, "a { color: #11223444 }", "a {\n color: #11223444;\n}\n")
expectPrinted(t, "a { color: #11223345 }", "a {\n color: #11223345;\n}\n")

	expectPrintedMangle(t, "a { color: #11223344 }", "a {\n color: #1234;\n}\n")
	expectPrintedMangle(t, "a { color: #12223344 }", "a {\n color: #12223344;\n}\n")
	expectPrintedMangle(t, "a { color: #11233344 }", "a {\n color: #11233344;\n}\n")
	expectPrintedMangle(t, "a { color: #11223444 }", "a {\n color: #11223444;\n}\n")
	expectPrintedMangle(t, "a { color: #11223345 }", "a {\n color: #11223345;\n}\n")

	expectPrinted(t, "a { color: #aabbccdd }", "a {\n color: #aabbccdd;\n}\n")
	expectPrinted(t, "a { color: #abbbccdd }", "a {\n color: #abbbccdd;\n}\n")
	expectPrinted(t, "a { color: #aabcccdd }", "a {\n color: #aabcccdd;\n}\n")
	expectPrinted(t, "a { color: #aabbcddd }", "a {\n color: #aabbcddd;\n}\n")
	expectPrinted(t, "a { color: #aabbccde }", "a {\n color: #aabbccde;\n}\n")

	expectPrintedMangle(t, "a { color: #aabbccdd }", "a {\n color: #abcd;\n}\n")
	expectPrintedMangle(t, "a { color: #abbbccdd }", "a {\n color: #abbbccdd;\n}\n")
	expectPrintedMangle(t, "a { color: #aabcccdd }", "a {\n color: #aabcccdd;\n}\n")
	expectPrintedMangle(t, "a { color: #aabbcddd }", "a {\n color: #aabbcddd;\n}\n")
	expectPrintedMangle(t, "a { color: #aabbccde }", "a {\n color: #aabbccde;\n}\n")

	expectPrinted(t, "a { color: #AABBCCDD }", "a {\n color: #AABBCCDD;\n}\n")
	expectPrinted(t, "a { color: #ABBBCCDD }", "a {\n color: #ABBBCCDD;\n}\n")
	expectPrinted(t, "a { color: #AABCCCDD }", "a {\n color: #AABCCCDD;\n}\n")
	expectPrinted(t, "a { color: #AABBCDDD }", "a {\n color: #AABBCDDD;\n}\n")
	expectPrinted(t, "a { color: #AABBCCDE }", "a {\n color: #AABBCCDE;\n}\n")

	expectPrintedMangle(t, "a { color: #AABBCCDD }", "a {\n color: #abcd;\n}\n")
	expectPrintedMangle(t, "a { color: #ABBBCCDD }", "a {\n color: #abbbccdd;\n}\n")
	expectPrintedMangle(t, "a { color: #AABCCCDD }", "a {\n color: #aabcccdd;\n}\n")
	expectPrintedMangle(t, "a { color: #AABBCDDD }", "a {\n color: #aabbcddd;\n}\n")
	expectPrintedMangle(t, "a { color: #AABBCCDE }", "a {\n color: #aabbccde;\n}\n")

	// "#RRGGBBFF": a fully-opaque alpha channel is dropped when mangling.
	expectPrinted(t, "a { color: #112233ff }", "a {\n color: #112233ff;\n}\n")
	expectPrinted(t, "a { color: #122233ff }", "a {\n color: #122233ff;\n}\n")
	expectPrinted(t, "a { color: #112333ff }", "a {\n color: #112333ff;\n}\n")
	expectPrinted(t, "a { color: #112234ff }", "a {\n color: #112234ff;\n}\n")
	expectPrinted(t, "a { color: #112233ef }", "a {\n color: #112233ef;\n}\n")

	expectPrintedMangle(t, "a { color: #112233ff }", "a {\n color: #123;\n}\n")
	expectPrintedMangle(t, "a { color: #122233ff }", "a {\n color: #122233;\n}\n")
	expectPrintedMangle(t, "a { color: #112333ff }", "a {\n color: #112333;\n}\n")
	expectPrintedMangle(t, "a { color: #112234ff }", "a {\n color: #112234;\n}\n")
	expectPrintedMangle(t, "a { color: #112233ef }", "a {\n color: #112233ef;\n}\n")

	expectPrinted(t, "a { color: #aabbccff }", "a {\n color: #aabbccff;\n}\n")
	expectPrinted(t, "a { color: #abbbccff }", "a {\n color: #abbbccff;\n}\n")
	expectPrinted(t, "a { color: #aabcccff }", "a {\n color: #aabcccff;\n}\n")
	expectPrinted(t, "a { color: #aabbcdff }", "a {\n color: #aabbcdff;\n}\n")
	expectPrinted(t, "a { color: #aabbccef }", "a {\n color: #aabbccef;\n}\n")

	expectPrintedMangle(t, "a { color: #aabbccff }", "a {\n color: #abc;\n}\n")
	expectPrintedMangle(t, "a { color: #abbbccff }", "a {\n color: #abbbcc;\n}\n")
	expectPrintedMangle(t, "a { color: #aabcccff }", "a {\n color: #aabccc;\n}\n")
	expectPrintedMangle(t, "a { color: #aabbcdff }", "a {\n color: #aabbcd;\n}\n")
	expectPrintedMangle(t, "a { color: #aabbccef }", "a {\n color: #aabbccef;\n}\n")

	expectPrinted(t, "a { color: #AABBCCFF }", "a {\n color: #AABBCCFF;\n}\n")
	expectPrinted(t, "a { color: #ABBBCCFF }", "a {\n color: #ABBBCCFF;\n}\n")
	expectPrinted(t, "a { color: #AABCCCFF }", "a {\n color: #AABCCCFF;\n}\n")
	expectPrinted(t, "a { color: #AABBCDFF }", "a {\n color: #AABBCDFF;\n}\n")
	expectPrinted(t, "a { color: #AABBCCEF }", "a {\n color: #AABBCCEF;\n}\n")

	expectPrintedMangle(t, "a { color: #AABBCCFF }", "a {\n color: #abc;\n}\n")
	expectPrintedMangle(t, "a { color: #ABBBCCFF }", "a {\n color: #abbbcc;\n}\n")
	expectPrintedMangle(t, "a { color: #AABCCCFF }", "a {\n color: #aabccc;\n}\n")
	expectPrintedMangle(t, "a { color: #AABBCDFF }", "a {\n color: #aabbcd;\n}\n")
	expectPrintedMangle(t, "a { color: #AABBCCEF }", "a {\n color: #aabbccef;\n}\n")
}

// TestColorNames checks mangling between hex colors and shorter CSS named
// colors (e.g. #ff0000 <-> red), and that non-mangled output is preserved
// verbatim, including identifier case.
func TestColorNames(t *testing.T) {
	expectPrinted(t, "a { color: #f00 }", "a {\n color: #f00;\n}\n")
	expectPrinted(t, "a { color: #f00f }", "a {\n color: #f00f;\n}\n")
	expectPrinted(t, "a { color: #ff0000 }", "a {\n color: #ff0000;\n}\n")
	expectPrinted(t, "a { color: #ff0000ff }", "a {\n color: #ff0000ff;\n}\n")

	expectPrintedMangle(t, "a { color: #f00 }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { color: #f00e }", "a {\n color: #f00e;\n}\n")
	expectPrintedMangle(t, "a { color: #f00f }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { color: #ff0000 }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { color: #ff0000ef }", "a {\n color: #ff0000ef;\n}\n")
	expectPrintedMangle(t, "a { color: #ff0000ff }", "a {\n color: red;\n}\n")
	expectPrintedMangle(t, "a { color: #ffc0cb }", "a {\n color: pink;\n}\n")
	expectPrintedMangle(t, "a { color: #ffc0cbef }", "a {\n color: #ffc0cbef;\n}\n")
	expectPrintedMangle(t, "a { color: #ffc0cbff }", "a {\n color: pink;\n}\n")

	expectPrinted(t, "a { color: white }", "a {\n color: white;\n}\n")
	expectPrinted(t, "a { color: tUrQuOiSe }", "a {\n color: tUrQuOiSe;\n}\n")

	expectPrintedMangle(t, "a { color: white }", "a {\n color: #fff;\n}\n")
	expectPrintedMangle(t, "a { color: tUrQuOiSe }", "a {\n color: #40e0d0;\n}\n")
}

// TestColorRGBA checks rgb()/rgba() mangling to hex (both space- and
// comma-separated forms, numbers and percentages), and lowering back to
// rgba() when modern syntax is unsupported. var() arguments are left alone.
func TestColorRGBA(t *testing.T) {
	expectPrintedMangle(t, "a { color: rgba(1 2 3 / 0.5) }", "a {\n color: #01020380;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1 2 3 / 50%) }", "a {\n color: #0102037f;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1, 2, 3, 0.5) }", "a {\n color: #01020380;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1, 2, 3, 50%) }", "a {\n color: #0102037f;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1% 2% 3% / 0.5) }", "a {\n color: #03050880;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1% 2% 3% / 50%) }", "a {\n color: #0305087f;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1%, 2%, 3%, 0.5) }", "a {\n color: #03050880;\n}\n")
	expectPrintedMangle(t, "a { color: rgba(1%, 2%, 3%, 50%) }", "a {\n color: #0305087f;\n}\n")

	expectPrintedLowerMangle(t, "a { color: rgb(1, 2, 3, 0.4) }", "a {\n color: rgba(1, 2, 3, .4);\n}\n")
	expectPrintedLowerMangle(t, "a { color: rgba(1, 2, 3, 40%) }", "a {\n color: rgba(1, 2, 3, .4);\n}\n")

	expectPrintedLowerMangle(t, "a { color: rgb(var(--x) var(--y) var(--z)) }", "a {\n color: rgb(var(--x) var(--y) var(--z));\n}\n")
}

// TestColorHSLA checks hsl()/hsla() mangling to hex, including deg/grad/turn
// hue units, and lowering to rgba() when modern syntax is unsupported.
func TestColorHSLA(t *testing.T) {
	expectPrintedMangle(t, ".red { color: hsl(0, 100%, 50%) }", ".red {\n color: red;\n}\n")
	expectPrintedMangle(t, ".orange { color: hsl(30deg, 100%, 50%) }", ".orange {\n color: #ff8000;\n}\n")
	expectPrintedMangle(t, ".yellow { color: hsl(60 100% 50%) }", ".yellow {\n color: #ff0;\n}\n")
	expectPrintedMangle(t, ".green { color: hsl(120, 100%, 50%) }", ".green {\n color: #0f0;\n}\n")
	expectPrintedMangle(t, ".cyan { color: hsl(200grad, 100%, 50%) }", ".cyan {\n color: #0ff;\n}\n")
	expectPrintedMangle(t, ".blue { color: hsl(240, 100%, 50%) }", ".blue {\n color: #00f;\n}\n")
	expectPrintedMangle(t, ".purple { color: hsl(0.75turn 100% 50%) }", ".purple {\n color: #7f00ff;\n}\n")
	expectPrintedMangle(t, ".magenta { color: hsl(300, 100%, 50%) }", ".magenta {\n color: #f0f;\n}\n")

	expectPrintedMangle(t, "a { color: hsl(30 25% 50% / 50%) }", "a {\n color: #9f80607f;\n}\n")
	expectPrintedMangle(t, "a { color: hsla(30 25% 50% / 50%) }", "a {\n color: #9f80607f;\n}\n")

	expectPrintedLowerMangle(t, "a { color: hsl(1, 2%, 3%, 0.4) }", "a {\n color: rgba(8, 8, 7, .4);\n}\n")
	expectPrintedLowerMangle(t, "a { color: hsla(1, 2%, 3%, 40%) }", "a {\n color: rgba(8, 8, 7, .4);\n}\n")
	expectPrintedLowerMangle(t, "a { color: 
hsl(var(--x) var(--y) var(--z)) }", "a {\n color: hsl(var(--x) var(--y) var(--z));\n}\n") } func TestLowerColor(t *testing.T) { expectPrintedLower(t, "a { color: rebeccapurple }", "a {\n color: #663399;\n}\n") expectPrintedLower(t, "a { color: #0123 }", "a {\n color: rgba(0, 17, 34, 0.2);\n}\n") expectPrintedLower(t, "a { color: #1230 }", "a {\n color: rgba(17, 34, 51, 0);\n}\n") expectPrintedLower(t, "a { color: #1234 }", "a {\n color: rgba(17, 34, 51, 0.267);\n}\n") expectPrintedLower(t, "a { color: #123f }", "a {\n color: rgba(17, 34, 51, 1);\n}\n") expectPrintedLower(t, "a { color: #12345678 }", "a {\n color: rgba(18, 52, 86, 0.471);\n}\n") expectPrintedLower(t, "a { color: #ff00007f }", "a {\n color: rgba(255, 0, 0, 0.498);\n}\n") expectPrintedLower(t, "a { color: rgb(1 2 3) }", "a {\n color: rgb(1, 2, 3);\n}\n") expectPrintedLower(t, "a { color: hsl(1 2% 3%) }", "a {\n color: hsl(1, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: rgba(1% 2% 3%) }", "a {\n color: rgb(1%, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(1deg 2% 3%) }", "a {\n color: hsl(1, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(200grad 2% 3%) }", "a {\n color: hsl(180, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(6.28319rad 2% 3%) }", "a {\n color: hsl(360, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(0.5turn 2% 3%) }", "a {\n color: hsl(180, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(+200grad 2% 3%) }", "a {\n color: hsl(180, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: hsla(-200grad 2% 3%) }", "a {\n color: hsl(-180, 2%, 3%);\n}\n") expectPrintedLower(t, "a { color: rgb(1 2 3 / 4) }", "a {\n color: rgba(1, 2, 3, 4);\n}\n") expectPrintedLower(t, "a { color: rgba(1% 2% 3% / 4%) }", "a {\n color: rgba(1%, 2%, 3%, 0.04);\n}\n") expectPrintedLower(t, "a { color: hsl(1 2% 3% / 4) }", "a {\n color: hsla(1, 2%, 3%, 4);\n}\n") expectPrintedLower(t, "a { color: hsla(1 2% 3% / 4%) }", "a {\n color: hsla(1, 2%, 3%, 0.04);\n}\n") 
expectPrintedLower(t, "a { color: rgb(1, 2, 3, 4) }", "a {\n color: rgba(1, 2, 3, 4);\n}\n") expectPrintedLower(t, "a { color: rgba(1%, 2%, 3%, 4%) }", "a {\n color: rgba(1%, 2%, 3%, 0.04);\n}\n") expectPrintedLower(t, "a { color: rgb(1%, 2%, 3%, 0.4%) }", "a {\n color: rgba(1%, 2%, 3%, 0.004);\n}\n") expectPrintedLower(t, "a { color: hsl(1, 2%, 3%, 4) }", "a {\n color: hsla(1, 2%, 3%, 4);\n}\n") expectPrintedLower(t, "a { color: hsla(1deg, 2%, 3%, 4%) }", "a {\n color: hsla(1, 2%, 3%, 0.04);\n}\n") expectPrintedLower(t, "a { color: hsl(1deg, 2%, 3%, 0.4%) }", "a {\n color: hsla(1, 2%, 3%, 0.004);\n}\n") } func TestDeclaration(t *testing.T) { expectPrinted(t, ".decl {}", ".decl {\n}\n") expectPrinted(t, ".decl { a: b }", ".decl {\n a: b;\n}\n") expectPrinted(t, ".decl { a: b; }", ".decl {\n a: b;\n}\n") expectPrinted(t, ".decl { a: b; c: d }", ".decl {\n a: b;\n c: d;\n}\n") expectPrinted(t, ".decl { a: b; c: d; }", ".decl {\n a: b;\n c: d;\n}\n") expectParseError(t, ".decl { a { b: c; } }", "<stdin>: WARNING: Expected \":\"\n") expectPrinted(t, ".decl { & a { b: c; } }", ".decl {\n & a {\n b: c;\n }\n}\n") // See http://browserhacks.com/ expectPrinted(t, ".selector { (;property: value;); }", ".selector {\n (;property: value;);\n}\n") expectPrinted(t, ".selector { [;property: value;]; }", ".selector {\n [;property: value;];\n}\n") expectPrinted(t, ".selector, {}", ".selector, {\n}\n") expectPrinted(t, ".selector\\ {}", ".selector\\ {\n}\n") expectPrinted(t, ".selector { property: value\\9; }", ".selector {\n property: value\\\t;\n}\n") expectPrinted(t, "@media \\0screen\\,screen\\9 {}", "@media \uFFFDscreen\\,screen\\\t {\n}\n") } func TestSelector(t *testing.T) { expectPrinted(t, "a{}", "a {\n}\n") expectPrinted(t, "a {}", "a {\n}\n") expectPrinted(t, "a b {}", "a b {\n}\n") expectPrinted(t, "a/**/b {}", "a b {\n}\n") expectPrinted(t, "a/**/.b {}", "a.b {\n}\n") expectPrinted(t, "a/**/:b {}", "a:b {\n}\n") expectPrinted(t, "a/**/[b] {}", "a[b] {\n}\n") 
expectPrinted(t, "a>/**/b {}", "a > b {\n}\n") expectPrinted(t, "a+/**/b {}", "a + b {\n}\n") expectPrinted(t, "a~/**/b {}", "a ~ b {\n}\n") expectPrinted(t, "[b]{}", "[b] {\n}\n") expectPrinted(t, "[b] {}", "[b] {\n}\n") expectPrinted(t, "a[b] {}", "a[b] {\n}\n") expectPrinted(t, "a [b] {}", "a [b] {\n}\n") expectParseError(t, "[] {}", "<stdin>: WARNING: Expected identifier but found \"]\"\n") expectParseError(t, "[b {}", "<stdin>: WARNING: Expected \"]\" but found \"{\"\n") expectParseError(t, "[b]] {}", "<stdin>: WARNING: Unexpected \"]\"\n") expectParseError(t, "a[b {}", "<stdin>: WARNING: Expected \"]\" but found \"{\"\n") expectParseError(t, "a[b]] {}", "<stdin>: WARNING: Unexpected \"]\"\n") expectPrinted(t, "[|b]{}", "[b] {\n}\n") // "[|b]" is equivalent to "[b]" expectPrinted(t, "[*|b]{}", "[*|b] {\n}\n") expectPrinted(t, "[a|b]{}", "[a|b] {\n}\n") expectPrinted(t, "[a|b|=\"c\"]{}", "[a|b|=c] {\n}\n") expectPrinted(t, "[a|b |= \"c\"]{}", "[a|b|=c] {\n}\n") expectParseError(t, "[a||b] {}", "<stdin>: WARNING: Expected identifier but found \"|\"\n") expectParseError(t, "[* | b] {}", "<stdin>: WARNING: Expected \"|\" but found whitespace\n") expectParseError(t, "[a | b] {}", "<stdin>: WARNING: Expected \"=\" but found whitespace\n") expectPrinted(t, "[b=\"c\"] {}", "[b=c] {\n}\n") expectPrinted(t, "[b=\"c d\"] {}", "[b=\"c d\"] {\n}\n") expectPrinted(t, "[b=\"0c\"] {}", "[b=\"0c\"] {\n}\n") expectPrinted(t, "[b~=\"c\"] {}", "[b~=c] {\n}\n") expectPrinted(t, "[b^=\"c\"] {}", "[b^=c] {\n}\n") expectPrinted(t, "[b$=\"c\"] {}", "[b$=c] {\n}\n") expectPrinted(t, "[b*=\"c\"] {}", "[b*=c] {\n}\n") expectPrinted(t, "[b|=\"c\"] {}", "[b|=c] {\n}\n") expectParseError(t, "[b?=\"c\"] {}", "<stdin>: WARNING: Expected \"]\" but found \"?\"\n") expectPrinted(t, "[b = \"c\"] {}", "[b=c] {\n}\n") expectPrinted(t, "[b ~= \"c\"] {}", "[b~=c] {\n}\n") expectPrinted(t, "[b ^= \"c\"] {}", "[b^=c] {\n}\n") expectPrinted(t, "[b $= \"c\"] {}", "[b$=c] {\n}\n") expectPrinted(t, "[b *= 
\"c\"] {}", "[b*=c] {\n}\n") expectPrinted(t, "[b |= \"c\"] {}", "[b|=c] {\n}\n") expectParseError(t, "[b ?= \"c\"] {}", "<stdin>: WARNING: Expected \"]\" but found \"?\"\n") expectPrinted(t, "[b = \"c\" i] {}", "[b=c i] {\n}\n") expectPrinted(t, "[b = \"c\" I] {}", "[b=c I] {\n}\n") expectPrinted(t, "[b = \"c\" s] {}", "[b=c s] {\n}\n") expectPrinted(t, "[b = \"c\" S] {}", "[b=c S] {\n}\n") expectParseError(t, "[b i] {}", "<stdin>: WARNING: Expected \"]\" but found \"i\"\n<stdin>: WARNING: Unexpected \"]\"\n") expectParseError(t, "[b I] {}", "<stdin>: WARNING: Expected \"]\" but found \"I\"\n<stdin>: WARNING: Unexpected \"]\"\n") expectParseError(t, "[b s] {}", "<stdin>: WARNING: Expected \"]\" but found \"s\"\n<stdin>: WARNING: Unexpected \"]\"\n") expectParseError(t, "[b S] {}", "<stdin>: WARNING: Expected \"]\" but found \"S\"\n<stdin>: WARNING: Unexpected \"]\"\n") expectPrinted(t, "|b {}", "|b {\n}\n") expectPrinted(t, "|* {}", "|* {\n}\n") expectPrinted(t, "a|b {}", "a|b {\n}\n") expectPrinted(t, "a|* {}", "a|* {\n}\n") expectPrinted(t, "*|b {}", "*|b {\n}\n") expectPrinted(t, "*|* {}", "*|* {\n}\n") expectParseError(t, "a||b {}", "<stdin>: WARNING: Expected identifier but found \"|\"\n") expectPrinted(t, "a+b {}", "a + b {\n}\n") expectPrinted(t, "a>b {}", "a > b {\n}\n") expectPrinted(t, "a+b {}", "a + b {\n}\n") expectPrinted(t, "a~b {}", "a ~ b {\n}\n") expectPrinted(t, "a + b {}", "a + b {\n}\n") expectPrinted(t, "a > b {}", "a > b {\n}\n") expectPrinted(t, "a + b {}", "a + b {\n}\n") expectPrinted(t, "a ~ b {}", "a ~ b {\n}\n") expectPrinted(t, "::b {}", "::b {\n}\n") expectPrinted(t, "*::b {}", "*::b {\n}\n") expectPrinted(t, "a::b {}", "a::b {\n}\n") expectPrinted(t, "::b(c) {}", "::b(c) {\n}\n") expectPrinted(t, "*::b(c) {}", "*::b(c) {\n}\n") expectPrinted(t, "a::b(c) {}", "a::b(c) {\n}\n") expectPrinted(t, "a:b:c {}", "a:b:c {\n}\n") expectPrinted(t, "a:b(:c) {}", "a:b(:c) {\n}\n") expectPrinted(t, "a: b {}", "a: b {\n}\n") expectPrinted(t, "#id 
{}", "#id {\n}\n") expectPrinted(t, "#--0 {}", "#--0 {\n}\n") expectPrinted(t, "#\\-0 {}", "#\\-0 {\n}\n") expectPrinted(t, "#\\30 {}", "#\\30 {\n}\n") expectPrinted(t, "div#id {}", "div#id {\n}\n") expectPrinted(t, "div#--0 {}", "div#--0 {\n}\n") expectPrinted(t, "div#\\-0 {}", "div#\\-0 {\n}\n") expectPrinted(t, "div#\\30 {}", "div#\\30 {\n}\n") expectParseError(t, "#0 {}", "<stdin>: WARNING: Unexpected \"#0\"\n") expectParseError(t, "#-0 {}", "<stdin>: WARNING: Unexpected \"#-0\"\n") expectParseError(t, "div#0 {}", "<stdin>: WARNING: Unexpected \"#0\"\n") expectParseError(t, "div#-0 {}", "<stdin>: WARNING: Unexpected \"#-0\"\n") expectPrinted(t, "div::before::after::selection::first-line::first-letter {color:red}", "div::before::after::selection::first-line::first-letter {\n color: red;\n}\n") expectPrintedMangle(t, "div::before::after::selection::first-line::first-letter {color:red}", "div:before:after::selection:first-line:first-letter {\n color: red;\n}\n") // Make sure '-' and '\\' consume an ident-like token instead of a name expectParseError(t, "_:-ms-lang(x) {}", "") expectParseError(t, "_:\\ms-lang(x) {}", "") } func TestNestedSelector(t *testing.T) { expectPrinted(t, "& {}", "& {\n}\n") expectPrinted(t, "& b {}", "& b {\n}\n") expectPrinted(t, "&:b {}", "&:b {\n}\n") expectPrinted(t, "&* {}", "&* {\n}\n") expectPrinted(t, "&|b {}", "&|b {\n}\n") expectPrinted(t, "&*|b {}", "&*|b {\n}\n") expectPrinted(t, "&a|b {}", "&a|b {\n}\n") expectPrinted(t, "&[a] {}", "&[a] {\n}\n") expectPrinted(t, "a { & {} }", "a {\n & {\n }\n}\n") expectPrinted(t, "a { & b {} }", "a {\n & b {\n }\n}\n") expectPrinted(t, "a { &:b {} }", "a {\n &:b {\n }\n}\n") expectPrinted(t, "a { &* {} }", "a {\n &* {\n }\n}\n") expectPrinted(t, "a { &|b {} }", "a {\n &|b {\n }\n}\n") expectPrinted(t, "a { &*|b {} }", "a {\n &*|b {\n }\n}\n") expectPrinted(t, "a { &a|b {} }", "a {\n &a|b {\n }\n}\n") expectPrinted(t, "a { &[b] {} }", "a {\n &[b] {\n }\n}\n") expectParseError(t, "a { & b, c {} 
}", "<stdin>: WARNING: Every selector in a nested style rule must start with \"&\"\n"+ "<stdin>: NOTE: This is a nested style rule because of the \"&\" here:\n") expectParseError(t, "a { & b, & c {} }", "") expectParseError(t, "a { b & {} }", "<stdin>: WARNING: Expected \":\"\n") expectParseError(t, "a { @nest b & {} }", "") expectParseError(t, "a { @nest & b, c {} }", "<stdin>: WARNING: Every selector in a nested style rule must contain \"&\"\n"+ "<stdin>: NOTE: This is a nested style rule because of the \"@nest\" here:\n") expectParseError(t, "a { @nest b &, c {} }", "<stdin>: WARNING: Every selector in a nested style rule must contain \"&\"\n"+ "<stdin>: NOTE: This is a nested style rule because of the \"@nest\" here:\n") expectPrinted(t, "a { @nest b & { color: red } }", "a {\n @nest b & {\n color: red;\n }\n}\n") expectPrinted(t, "a { @nest b& { color: red } }", "a {\n @nest b& {\n color: red;\n }\n}\n") expectPrinted(t, "a { @nest b&[c] { color: red } }", "a {\n @nest b[c]& {\n color: red;\n }\n}\n") expectPrinted(t, "a { @nest &[c] { color: red } }", "a {\n @nest &[c] {\n color: red;\n }\n}\n") expectPrinted(t, "a { @nest [c]& { color: red } }", "a {\n @nest [c]& {\n color: red;\n }\n}\n") expectPrintedMinify(t, "a { @nest b & { color: red } }", "a{@nest b &{color:red}}") expectPrintedMinify(t, "a { @nest b& { color: red } }", "a{@nest b&{color:red}}") // Don't drop "@nest" for invalid rules expectParseError(t, "a { @nest @invalid { color: red } }", "<stdin>: WARNING: Unexpected \"@invalid\"\n") expectPrinted(t, "a { @nest @invalid { color: red } }", "a {\n @nest @invalid {\n color: red;\n }\n}\n") // Check removal of "@nest" when minifying expectPrinted(t, "a { @nest & b, & c { color: red } }", "a {\n @nest & b,\n & c {\n color: red;\n }\n}\n") expectPrintedMangle(t, "a { @nest & b, & c { color: red } }", "a {\n & b,\n & c {\n color: red;\n }\n}\n") expectPrintedMangle(t, "a { @nest b &, & c { color: red } }", "a {\n @nest b &,\n & c {\n color: red;\n 
}\n}\n") expectPrintedMangle(t, "a { @nest & b, c & { color: red } }", "a {\n @nest & b,\n c & {\n color: red;\n }\n}\n") outside := "<stdin>: WARNING: CSS nesting syntax cannot be used outside of a style rule\n" expectParseError(t, "& a {}", outside) expectParseError(t, "@nest a & {}", outside) expectParseError(t, "@media screen { & a {} }", outside) expectParseError(t, "@media screen { @nest a & {} }", outside) } func TestBadQualifiedRules(t *testing.T) { expectParseError(t, "$bad: rule;", "<stdin>: WARNING: Unexpected \"$\"\n") expectParseError(t, "$bad { color: red }", "<stdin>: WARNING: Unexpected \"$\"\n") expectParseError(t, "a { div.major { color: blue } color: red }", "<stdin>: WARNING: Expected \":\" but found \".\"\n") expectParseError(t, "a { div:hover { color: blue } color: red }", "") expectParseError(t, "a { div:hover { color: blue }; color: red }", "") expectParseError(t, "a { div:hover { color: blue } ; color: red }", "") expectParseError(t, "! { x: {} }", "<stdin>: WARNING: Unexpected \"!\"\n") } func TestAtRule(t *testing.T) { expectPrinted(t, "@unknown", "@unknown;\n") expectPrinted(t, "@unknown;", "@unknown;\n") expectPrinted(t, "@unknown{}", "@unknown {}\n") expectPrinted(t, "@unknown x;", "@unknown x;\n") expectPrinted(t, "@unknown{\na: b;\nc: d;\n}", "@unknown { a: b; c: d; }\n") expectParseError(t, "@unknown", "<stdin>: WARNING: Expected \"{\" but found end of file\n") expectParseError(t, "@", "<stdin>: WARNING: Unexpected \"@\"\n") expectParseError(t, "@;", "<stdin>: WARNING: Unexpected \"@\"\n") expectParseError(t, "@{}", "<stdin>: WARNING: Unexpected \"@\"\n") expectPrinted(t, "@viewport { width: 100vw }", "@viewport {\n width: 100vw;\n}\n") expectPrinted(t, "@-ms-viewport { width: 100vw }", "@-ms-viewport {\n width: 100vw;\n}\n") expectPrinted(t, "@document url(\"https://www.example.com/\") { h1 { color: green } }", "@document url(https://www.example.com/) {\n h1 {\n color: green;\n }\n}\n") expectPrinted(t, "@-moz-document url-prefix() 
{ h1 { color: green } }", "@-moz-document url-prefix() {\n h1 {\n color: green;\n }\n}\n") // https://www.w3.org/TR/css-page-3/#syntax-page-selector expectPrinted(t, ` @page :first { margin: 0 } @page { @top-left-corner { content: 'tlc' } @top-left { content: 'tl' } @top-center { content: 'tc' } @top-right { content: 'tr' } @top-right-corner { content: 'trc' } @bottom-left-corner { content: 'blc' } @bottom-left { content: 'bl' } @bottom-center { content: 'bc' } @bottom-right { content: 'br' } @bottom-right-corner { content: 'brc' } @left-top { content: 'lt' } @left-middle { content: 'lm' } @left-bottom { content: 'lb' } @right-top { content: 'rt' } @right-middle { content: 'rm' } @right-bottom { content: 'rb' } } `, `@page :first { margin: 0; } @page { @top-left-corner { content: "tlc"; } @top-left { content: "tl"; } @top-center { content: "tc"; } @top-right { content: "tr"; } @top-right-corner { content: "trc"; } @bottom-left-corner { content: "blc"; } @bottom-left { content: "bl"; } @bottom-center { content: "bc"; } @bottom-right { content: "br"; } @bottom-right-corner { content: "brc"; } @left-top { content: "lt"; } @left-middle { content: "lm"; } @left-bottom { content: "lb"; } @right-top { content: "rt"; } @right-middle { content: "rm"; } @right-bottom { content: "rb"; } } `) } func TestAtCharset(t *testing.T) { expectPrinted(t, "@charset \"UTF-8\";", "@charset \"UTF-8\";\n") expectPrinted(t, "@charset 'UTF-8';", "@charset \"UTF-8\";\n") expectParseError(t, "@charset \"utf-8\";", "") expectParseError(t, "@charset \"Utf-8\";", "") expectParseError(t, "@charset \"UTF-8\";", "") expectParseError(t, "@charset \"US-ASCII\";", "<stdin>: WARNING: \"UTF-8\" will be used instead of unsupported charset \"US-ASCII\"\n") expectParseError(t, "@charset;", "<stdin>: WARNING: Expected whitespace but found \";\"\n") expectParseError(t, "@charset ;", "<stdin>: WARNING: Expected string token but found \";\"\n") expectParseError(t, "@charset\"UTF-8\";", "<stdin>: WARNING: 
Expected whitespace but found \"\\\"UTF-8\\\"\"\n") expectParseError(t, "@charset \"UTF-8\"", "<stdin>: WARNING: Expected \";\" but found end of file\n") expectParseError(t, "@charset url(UTF-8);", "<stdin>: WARNING: Expected string token but found \"url(UTF-8)\"\n") expectParseError(t, "@charset url(\"UTF-8\");", "<stdin>: WARNING: Expected string token but found \"url(\"\n") expectParseError(t, "@charset \"UTF-8\" ", "<stdin>: WARNING: Expected \";\" but found whitespace\n") expectParseError(t, "@charset \"UTF-8\"{}", "<stdin>: WARNING: Expected \";\" but found \"{\"\n") // https://drafts.csswg.org/css-transitions-2/#defining-before-change-style-the-starting-style-rule expectPrinted(t, ` @starting-style { h1 { background-color: transparent; } @layer foo { div { height: 100px; } } } `, `@starting-style { h1 { background-color: transparent; } @layer foo { div { height: 100px; } } } `) expectPrintedMinify(t, `@starting-style { h1 { background-color: transparent; } }`, "@starting-style{h1{background-color:transparent}}") } func TestAtImport(t *testing.T) { expectPrinted(t, "@import\"foo.css\";", "@import \"foo.css\";\n") expectPrinted(t, "@import \"foo.css\";", "@import \"foo.css\";\n") expectPrinted(t, "@import \"foo.css\" ;", "@import \"foo.css\";\n") expectPrinted(t, "@import url();", "@import \"\";\n") expectPrinted(t, "@import url(foo.css);", "@import \"foo.css\";\n") expectPrinted(t, "@import url(foo.css) ;", "@import \"foo.css\";\n") expectPrinted(t, "@import url(\"foo.css\");", "@import \"foo.css\";\n") expectPrinted(t, "@import url(\"foo.css\") ;", "@import \"foo.css\";\n") expectPrinted(t, "@import url(\"foo.css\") print;", "@import \"foo.css\" print;\n") expectPrinted(t, "@import url(\"foo.css\") screen and (orientation:landscape);", "@import \"foo.css\" screen and (orientation:landscape);\n") expectParseError(t, "@import;", "<stdin>: WARNING: Expected URL token but found \";\"\n") expectParseError(t, "@import ;", "<stdin>: WARNING: Expected URL token but 
found \";\"\n") expectParseError(t, "@import \"foo.css\"", "<stdin>: WARNING: Expected \";\" but found end of file\n") expectParseError(t, "@import url(\"foo.css\";", "<stdin>: WARNING: Expected \")\" but found \";\"\n") expectParseError(t, "@import noturl(\"foo.css\");", "<stdin>: WARNING: Expected URL token but found \"noturl(\"\n") expectParseError(t, "@import url(", `<stdin>: WARNING: Expected URL token but found bad URL token <stdin>: ERROR: Expected ")" to end URL token <stdin>: WARNING: Expected ";" but found end of file `) expectParseError(t, "@import \"foo.css\" {}", "<stdin>: WARNING: Expected \";\"\n") expectPrinted(t, "@import \"foo\"\na { color: red }\nb { color: blue }", "@import \"foo\" a { color: red }\nb {\n color: blue;\n}\n") expectParseError(t, "a { @import \"foo.css\" }", "<stdin>: WARNING: \"@import\" is only valid at the top level\n<stdin>: WARNING: Expected \";\"\n") expectPrinted(t, "a { @import \"foo.css\" }", "a {\n @import \"foo.css\";\n}\n") } func TestLegalComment(t *testing.T) { expectPrinted(t, "/*!*/@import \"x\";", "/*!*/\n@import \"x\";\n") expectPrinted(t, "/*!*/@charset \"UTF-8\";", "/*!*/\n@charset \"UTF-8\";\n") expectPrinted(t, "/*!*/ @import \"x\";", "/*!*/\n@import \"x\";\n") expectPrinted(t, "/*!*/ @charset \"UTF-8\";", "/*!*/\n@charset \"UTF-8\";\n") expectPrinted(t, "/*!*/ @charset \"UTF-8\"; @import \"x\";", "/*!*/\n@charset \"UTF-8\";\n@import \"x\";\n") expectPrinted(t, "/*!*/ @import \"x\"; @charset \"UTF-8\";", "/*!*/\n@import \"x\";\n@charset \"UTF-8\";\n") expectParseError(t, "/*!*/ @import \"x\";", "") expectParseError(t, "/*!*/ @charset \"UTF-8\";", "") expectParseError(t, "/*!*/ @charset \"UTF-8\"; @import \"x\";", "") expectParseError(t, "/*!*/ @import \"x\"; @charset \"UTF-8\";", "<stdin>: WARNING: \"@charset\" must be the first rule in the file\n"+ "<stdin>: NOTE: This rule cannot come before a \"@charset\" rule\n") expectPrinted(t, "@import \"x\";/*!*/", "@import \"x\";\n/*!*/\n") expectPrinted(t, "@charset 
\"UTF-8\";/*!*/", "@charset \"UTF-8\";\n/*!*/\n") expectPrinted(t, "@import \"x\"; /*!*/", "@import \"x\";\n/*!*/\n") expectPrinted(t, "@charset \"UTF-8\"; /*!*/", "@charset \"UTF-8\";\n/*!*/\n") expectPrinted(t, "/*! before */ a { --b: var(--c, /*!*/ /*!*/); } /*! after */\n", "/*! before */\na {\n --b: var(--c, );\n}\n/*! after */\n") } func TestAtKeyframes(t *testing.T) { expectPrinted(t, "@keyframes {}", "@keyframes \"\" {\n}\n") expectPrinted(t, "@keyframes name{}", "@keyframes name {\n}\n") expectPrinted(t, "@keyframes name {}", "@keyframes name {\n}\n") expectPrinted(t, "@keyframes name{0%,50%{color:red}25%,75%{color:blue}}", "@keyframes name {\n 0%, 50% {\n color: red;\n }\n 25%, 75% {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { 0%, 50% { color: red } 25%, 75% { color: blue } }", "@keyframes name {\n 0%, 50% {\n color: red;\n }\n 25%, 75% {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name{from{color:red}to{color:blue}}", "@keyframes name {\n from {\n color: red;\n }\n to {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { from { color: red } to { color: blue } }", "@keyframes name {\n from {\n color: red;\n }\n to {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { from { color: red } }", "@keyframes name {\n from {\n color: red;\n }\n}\n") expectPrinted(t, "@keyframes name { 100% { color: red } }", "@keyframes name {\n 100% {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@keyframes name { from { color: red } }", "@keyframes name {\n 0% {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@keyframes name { 100% { color: red } }", "@keyframes name {\n to {\n color: red;\n }\n}\n") expectPrinted(t, "@-webkit-keyframes name {}", "@-webkit-keyframes name {\n}\n") expectPrinted(t, "@-moz-keyframes name {}", "@-moz-keyframes name {\n}\n") expectPrinted(t, "@-ms-keyframes name {}", "@-ms-keyframes name {\n}\n") expectPrinted(t, "@-o-keyframes name {}", "@-o-keyframes name {\n}\n") expectParseError(t, 
"@keyframes {}", "<stdin>: WARNING: Expected identifier but found \"{\"\n") expectParseError(t, "@keyframes 'name' {}", "<stdin>: WARNING: Expected identifier but found \"'name'\"\n") expectParseError(t, "@keyframes name { 0% 100% {} }", "<stdin>: WARNING: Expected \",\" but found \"100%\"\n") expectParseError(t, "@keyframes name { {} 0% {} }", "<stdin>: WARNING: Expected percentage but found \"{\"\n") expectParseError(t, "@keyframes name { 100 {} }", "<stdin>: WARNING: Expected percentage but found \"100\"\n") expectParseError(t, "@keyframes name { into {} }", "<stdin>: WARNING: Expected percentage but found \"into\"\n") expectParseError(t, "@keyframes name { 1,2 {} }", "<stdin>: WARNING: Expected percentage but found \"1\"\n") expectParseError(t, "@keyframes name { 1, 2 {} }", "<stdin>: WARNING: Expected percentage but found \"1\"\n") expectParseError(t, "@keyframes name { 1 ,2 {} }", "<stdin>: WARNING: Expected percentage but found \"1\"\n") expectParseError(t, "@keyframes name { 1%, {} }", "<stdin>: WARNING: Expected percentage but found \"{\"\n") expectParseError(t, "@keyframes name { 1%, x {} }", "<stdin>: WARNING: Expected percentage but found \"x\"\n") expectParseError(t, "@keyframes name { 1%, ! 
{} }", "<stdin>: WARNING: Expected percentage but found \"!\"\n") expectParseError(t, "@keyframes name { .x {} }", "<stdin>: WARNING: Expected percentage but found \".\"\n") expectParseError(t, "@keyframes name { {} }", "<stdin>: WARNING: Expected percentage but found \"{\"\n") expectParseError(t, "@keyframes name { 1% }", "<stdin>: WARNING: Expected \"{\" but found \"}\"\n") expectParseError(t, "@keyframes name { 1%", "<stdin>: WARNING: Expected \"{\" but found end of file\n") expectParseError(t, "@keyframes name { 1%,,2% {} }", "<stdin>: WARNING: Expected percentage but found \",\"\n") expectParseError(t, "@keyframes name {", "<stdin>: WARNING: Expected \"}\" but found end of file\n") expectPrinted(t, "@keyframes x { 1%, {} } @keyframes z { 1% {} }", "@keyframes x { 1%, {} }\n@keyframes z {\n 1% {\n }\n}\n") expectPrinted(t, "@keyframes x { .y {} } @keyframes z { 1% {} }", "@keyframes x { .y {} }\n@keyframes z {\n 1% {\n }\n}\n") expectPrinted(t, "@keyframes x { x {} } @keyframes z { 1% {} }", "@keyframes x {\n x {\n }\n}\n@keyframes z {\n 1% {\n }\n}\n") expectPrinted(t, "@keyframes x { {} } @keyframes z { 1% {} }", "@keyframes x { {} }\n@keyframes z {\n 1% {\n }\n}\n") expectPrinted(t, "@keyframes x { 1% {}", "@keyframes x { 1% {} }\n") expectPrinted(t, "@keyframes x { 1% {", "@keyframes x { 1% {} }\n") expectPrinted(t, "@keyframes x { 1%", "@keyframes x { 1% }\n") expectPrinted(t, "@keyframes x {", "@keyframes x {}\n") } func TestAtRuleValidation(t *testing.T) { expectParseError(t, "a {} b {} c {} @charset \"UTF-8\";", "<stdin>: WARNING: \"@charset\" must be the first rule in the file\n"+ "<stdin>: NOTE: This rule cannot come before a \"@charset\" rule\n") expectParseError(t, "a {} b {} c {} @import \"foo\";", "<stdin>: WARNING: All \"@import\" rules must come first\n"+ "<stdin>: NOTE: This rule cannot come before an \"@import\" rule\n") } func TestAtLayer(t *testing.T) { expectParseError(t, "@layer a, b;", "") expectParseError(t, "@layer a {}", "") 
expectParseError(t, "@layer {}", "") expectParseError(t, "@layer a, b {}", "<stdin>: WARNING: Expected \";\"\n") expectParseError(t, "@layer;", "<stdin>: WARNING: Unexpected \";\"\n") expectParseError(t, "@layer , b {}", "<stdin>: WARNING: Unexpected \",\"\n") expectParseError(t, "@layer a", "<stdin>: WARNING: Expected \";\" but found end of file\n") expectParseError(t, "@layer a { @layer b }", "<stdin>: WARNING: Expected \";\"\n") expectParseError(t, "@layer a b", "<stdin>: WARNING: Unexpected \"b\"\n<stdin>: WARNING: Expected \";\" but found end of file\n") expectParseError(t, "@layer a b ;", "<stdin>: WARNING: Unexpected \"b\"\n") expectParseError(t, "@layer a b {}", "<stdin>: WARNING: Unexpected \"b\"\n") expectPrinted(t, "@layer a, b;", "@layer a, b;\n") expectPrinted(t, "@layer a {}", "@layer a {\n}\n") expectPrinted(t, "@layer {}", "@layer {\n}\n") expectPrinted(t, "@layer foo { div { color: red } }", "@layer foo {\n div {\n color: red;\n }\n}\n") // Check semicolon error recovery expectPrinted(t, "@layer", "@layer;\n") expectPrinted(t, "@layer a", "@layer a;\n") expectPrinted(t, "@layer a { @layer }", "@layer a {\n @layer;\n}\n") expectPrinted(t, "@layer a { @layer b }", "@layer a {\n @layer b;\n}\n") // Check mangling expectPrintedMangle(t, "@layer foo { div {} }", "@layer foo;\n") expectPrintedMangle(t, "@layer foo { div { color: yellow } }", "@layer foo {\n div {\n color: #ff0;\n }\n}\n") expectPrintedMangle(t, "@layer a { @layer b {} }", "@layer a.b;\n") expectPrintedMangle(t, "@layer a { @layer {} }", "@layer a {\n @layer {\n }\n}\n") expectPrintedMangle(t, "@layer { @layer a {} }", "@layer {\n @layer a;\n}\n") expectPrintedMangle(t, "@layer a.b { @layer c.d {} }", "@layer a.b.c.d;\n") expectPrintedMangle(t, "@layer a.b { @layer c.d {} @layer e.f {} }", "@layer a.b {\n @layer c.d;\n @layer e.f;\n}\n") expectPrintedMangle(t, "@layer a.b { @layer c.d { e { f: g } } }", "@layer a.b.c.d {\n e {\n f: g;\n }\n}\n") // Invalid layer names should not be 
merged, since that causes the rule to // become invalid. It would be a change in semantics if we merged an invalid // rule with a valid rule since then the other valid rule would be invalid. expectParseError(t, "@layer foo { @layer initial; }", "<stdin>: WARNING: \"initial\" cannot be used as a layer name\n") expectParseError(t, "@layer foo { @layer inherit; }", "<stdin>: WARNING: \"inherit\" cannot be used as a layer name\n") expectParseError(t, "@layer foo { @layer unset; }", "<stdin>: WARNING: \"unset\" cannot be used as a layer name\n") expectParseError(t, "@layer initial { @layer foo; }", "<stdin>: WARNING: \"initial\" cannot be used as a layer name\n") expectParseError(t, "@layer inherit { @layer foo; }", "<stdin>: WARNING: \"inherit\" cannot be used as a layer name\n") expectParseError(t, "@layer unset { @layer foo; }", "<stdin>: WARNING: \"unset\" cannot be used as a layer name\n") expectPrintedMangle(t, "@layer foo { @layer initial { a { b: c } } }", "@layer foo {\n @layer initial {\n a {\n b: c;\n }\n }\n}\n") expectPrintedMangle(t, "@layer initial { @layer foo { a { b: c } } }", "@layer initial {\n @layer foo {\n a {\n b: c;\n }\n }\n}\n") // Order matters here. Do not drop the first "@layer a;" or the order will be changed. 
expectPrintedMangle(t, "@layer a; @layer b; @layer a;", "@layer a;\n@layer b;\n@layer a;\n") // Validate ordering with "@layer" and "@import" expectParseError(t, "@layer a; @import url(b);", "") expectParseError(t, "@layer a; @layer b; @import url(c);", "") expectParseError(t, "@layer a {} @import url(b);", "<stdin>: WARNING: All \"@import\" rules must come first\n<stdin>: NOTE: This rule cannot come before an \"@import\" rule\n") expectParseError(t, "@import url(a); @layer b; @import url(c);", "<stdin>: WARNING: All \"@import\" rules must come first\n<stdin>: NOTE: This rule cannot come before an \"@import\" rule\n") expectParseError(t, "@layer a; @charset \"UTF-8\";", "<stdin>: WARNING: \"@charset\" must be the first rule in the file\n<stdin>: NOTE: This rule cannot come before a \"@charset\" rule\n") } func TestEmptyRule(t *testing.T) { expectPrinted(t, "div {}", "div {\n}\n") expectPrinted(t, "@media screen {}", "@media screen {\n}\n") expectPrinted(t, "@page { @top-left {} }", "@page {\n @top-left {\n }\n}\n") expectPrinted(t, "@keyframes test { from {} to {} }", "@keyframes test {\n from {\n }\n to {\n }\n}\n") expectPrintedMangle(t, "div {}", "") expectPrintedMangle(t, "@media screen {}", "") expectPrintedMangle(t, "@page { @top-left {} }", "") expectPrintedMangle(t, "@keyframes test { from {} to {} }", "@keyframes test {\n}\n") expectPrinted(t, "$invalid {}", "$invalid {\n}\n") expectPrinted(t, "@page { color: red; @top-left {} }", "@page {\n color: red;\n @top-left {\n }\n}\n") expectPrinted(t, "@keyframes test { from {} to { color: red } }", "@keyframes test {\n from {\n }\n to {\n color: red;\n }\n}\n") expectPrinted(t, "@keyframes test { from { color: red } to {} }", "@keyframes test {\n from {\n color: red;\n }\n to {\n }\n}\n") expectPrintedMangle(t, "$invalid {}", "$invalid {\n}\n") expectPrintedMangle(t, "@page { color: red; @top-left {} }", "@page {\n color: red;\n}\n") expectPrintedMangle(t, "@keyframes test { from {} to { color: red } }", 
"@keyframes test {\n to {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@keyframes test { from { color: red } to {} }", "@keyframes test {\n 0% {\n color: red;\n }\n}\n") expectPrintedMangleMinify(t, "$invalid {}", "$invalid{}") expectPrintedMangleMinify(t, "@page { color: red; @top-left {} }", "@page{color:red}") expectPrintedMangleMinify(t, "@keyframes test { from {} to { color: red } }", "@keyframes test{to{color:red}}") expectPrintedMangleMinify(t, "@keyframes test { from { color: red } to {} }", "@keyframes test{0%{color:red}}") } func TestMarginAndPaddingAndInset(t *testing.T) { for _, x := range []string{"margin", "padding", "inset"} { xTop := x + "-top" xRight := x + "-right" xBottom := x + "-bottom" xLeft := x + "-left" if x == "inset" { xTop = "top" xRight = "right" xBottom = "bottom" xLeft = "left" } expectPrinted(t, "a { "+x+": 0 1px 0 1px }", "a {\n "+x+": 0 1px 0 1px;\n}\n") expectPrinted(t, "a { "+x+": 0 1px 0px 1px }", "a {\n "+x+": 0 1px 0px 1px;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 0px }", "a {\n "+xTop+": 0;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 0px }", "a {\n "+xRight+": 0;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 0px }", "a {\n "+xBottom+": 0;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 0px }", "a {\n "+xLeft+": 0;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px }", "a {\n "+xTop+": 1px;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 1px }", "a {\n "+xRight+": 1px;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 1px }", "a {\n "+xBottom+": 1px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px }", "a {\n "+xLeft+": 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 0 0 }", "a {\n "+x+": 0 1px 0 0;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 2px 1px }", "a {\n "+x+": 0 1px 2px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 0 1px }", "a {\n "+x+": 0 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 0 0 0 }", "a {\n "+x+": 0;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 0 0 0 !important }", "a {\n "+x+": 0 
!important;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 0px 1px }", "a {\n "+x+": 0 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1 0px 1px }", "a {\n "+x+": 0 1 0px 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xTop+": 5px }", "a {\n "+x+": 5px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xRight+": 5px }", "a {\n "+x+": 1px 5px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xBottom+": 5px }", "a {\n "+x+": 1px 2px 5px 4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xLeft+": 5px }", "a {\n "+x+": 1px 2px 3px 5px;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 5px; "+x+": 1px 2px 3px 4px }", "a {\n "+x+": 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 5px; "+x+": 1px 2px 3px 4px }", "a {\n "+x+": 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 5px; "+x+": 1px 2px 3px 4px }", "a {\n "+x+": 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 5px; "+x+": 1px 2px 3px 4px }", "a {\n "+x+": 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px; "+xTop+": 2px }", "a {\n "+xTop+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 1px; "+xRight+": 2px }", "a {\n "+xRight+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 1px; "+xBottom+": 2px }", "a {\n "+xBottom+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px; "+xLeft+": 2px }", "a {\n "+xLeft+": 2px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px; "+x+": 2px !important }", "a {\n "+x+": 1px;\n "+x+": 2px !important;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px; "+xTop+": 2px !important }", "a {\n "+xTop+": 1px;\n "+xTop+": 2px !important;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 1px; "+xRight+": 2px !important }", "a {\n "+xRight+": 1px;\n "+xRight+": 2px !important;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 1px; "+xBottom+": 2px !important }", "a {\n "+xBottom+": 1px;\n "+xBottom+": 2px !important;\n}\n") expectPrintedMangle(t, "a { 
"+xLeft+": 1px; "+xLeft+": 2px !important }", "a {\n "+xLeft+": 1px;\n "+xLeft+": 2px !important;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px !important; "+x+": 2px }", "a {\n "+x+": 1px !important;\n "+x+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px !important; "+xTop+": 2px }", "a {\n "+xTop+": 1px !important;\n "+xTop+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xRight+": 1px !important; "+xRight+": 2px }", "a {\n "+xRight+": 1px !important;\n "+xRight+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xBottom+": 1px !important; "+xBottom+": 2px }", "a {\n "+xBottom+": 1px !important;\n "+xBottom+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px !important; "+xLeft+": 2px }", "a {\n "+xLeft+": 1px !important;\n "+xLeft+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px; "+xTop+": }", "a {\n "+xTop+": 1px;\n "+xTop+":;\n}\n") expectPrintedMangle(t, "a { "+xTop+": 1px; "+xTop+": 2px 3px }", "a {\n "+xTop+": 1px;\n "+xTop+": 2px 3px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xLeft+": -4px; "+xRight+": -2px }", "a {\n "+x+": 1px -2px 3px -4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px; "+xTop+": 5px }", "a {\n "+x+": 5px 2px 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px; "+xTop+": 5px }", "a {\n "+x+": 5px 1px 1px;\n}\n") // This doesn't collapse because if the "calc" has an error it // will be ignored and the original rule will show through expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xRight+": calc(1px + var(--x)) }", "a {\n "+x+": 1px 2px 3px 4px;\n "+xRight+": calc(1px + var(--x));\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px; "+xRight+": 2px; "+xTop+": 3px; "+xBottom+": 4px }", "a {\n "+x+": 3px 2px 4px 1px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px; "+xRight+": 5px !important }", "a {\n "+x+": 1px 2px 3px 4px;\n "+xRight+": 5px !important;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px 2px 3px 4px !important; "+xRight+": 5px }", "a {\n "+x+": 1px 2px 3px 4px !important;\n 
"+xRight+": 5px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px !important; "+xRight+": 2px; "+xTop+": 3px !important; "+xBottom+": 4px }", "a {\n "+xLeft+": 1px !important;\n "+xRight+": 2px;\n "+xTop+": 3px !important;\n "+xBottom+": 4px;\n}\n") // This should not be changed because "--x" and "--z" could be empty expectPrintedMangle(t, "a { "+x+": var(--x) var(--y) var(--z) var(--y) }", "a {\n "+x+": var(--x) var(--y) var(--z) var(--y);\n}\n") // Don't merge different units expectPrintedMangle(t, "a { "+x+": 1px; "+x+": 2px; }", "a {\n "+x+": 2px;\n}\n") expectPrintedMangle(t, "a { "+x+": 1px; "+x+": 2vw; }", "a {\n "+x+": 1px;\n "+x+": 2vw;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px; "+xLeft+": 2px; }", "a {\n "+xLeft+": 2px;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1px; "+xLeft+": 2vw; }", "a {\n "+xLeft+": 1px;\n "+xLeft+": 2vw;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 2cm 3%; "+x+": 4px; }", "a {\n "+x+": 4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 2cm 3%; "+x+": 4vw; }", "a {\n "+x+": 0 1px 2cm 3%;\n "+x+": 4vw;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 2cm 3%; "+xLeft+": 4px; }", "a {\n "+x+": 0 1px 2cm 4px;\n}\n") expectPrintedMangle(t, "a { "+x+": 0 1px 2cm 3%; "+xLeft+": 4vw; }", "a {\n "+x+": 0 1px 2cm 3%;\n "+xLeft+": 4vw;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1Q; "+xRight+": 2Q; "+xTop+": 3Q; "+xBottom+": 4Q; }", "a {\n "+x+": 3Q 2Q 4Q 1Q;\n}\n") expectPrintedMangle(t, "a { "+xLeft+": 1Q; "+xRight+": 2Q; "+xTop+": 3Q; "+xBottom+": 0; }", "a {\n "+xLeft+": 1Q;\n "+xRight+": 2Q;\n "+xTop+": 3Q;\n "+xBottom+": 0;\n}\n") } // "auto" is the only keyword allowed in a quad, and only for "margin" and "inset" not for "padding" expectPrintedMangle(t, "a { margin: 1px auto 3px 4px; margin-left: auto }", "a {\n margin: 1px auto 3px;\n}\n") expectPrintedMangle(t, "a { inset: 1px auto 3px 4px; left: auto }", "a {\n inset: 1px auto 3px;\n}\n") expectPrintedMangle(t, "a { padding: 1px auto 3px 4px; padding-left: auto }", 
"a {\n padding: 1px auto 3px 4px;\n padding-left: auto;\n}\n") expectPrintedMangle(t, "a { margin: auto; margin-left: 1px }", "a {\n margin: auto auto auto 1px;\n}\n") expectPrintedMangle(t, "a { inset: auto; left: 1px }", "a {\n inset: auto auto auto 1px;\n}\n") expectPrintedMangle(t, "a { padding: auto; padding-left: 1px }", "a {\n padding: auto;\n padding-left: 1px;\n}\n") expectPrintedMangle(t, "a { margin: inherit; margin-left: 1px }", "a {\n margin: inherit;\n margin-left: 1px;\n}\n") expectPrintedMangle(t, "a { inset: inherit; left: 1px }", "a {\n inset: inherit;\n left: 1px;\n}\n") expectPrintedMangle(t, "a { padding: inherit; padding-left: 1px }", "a {\n padding: inherit;\n padding-left: 1px;\n}\n") expectPrintedLowerMangle(t, "a { top: 0; right: 0; bottom: 0; left: 0; }", "a {\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n}\n") } func TestBorderRadius(t *testing.T) { expectPrinted(t, "a { border-top-left-radius: 0 0 }", "a {\n border-top-left-radius: 0 0;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 0 0 }", "a {\n border-top-left-radius: 0;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 0 0px }", "a {\n border-top-left-radius: 0;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 0 1px }", "a {\n border-top-left-radius: 0 1px;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 0; border-radius: 1px }", "a {\n border-radius: 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 2px 3px 4px }", "a {\n border-radius: 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 2px 1px 3px }", "a {\n border-radius: 1px 2px 1px 3px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 2px 3px 2px }", "a {\n border-radius: 1px 2px 3px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 2px 1px 2px }", "a {\n border-radius: 1px 2px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 1px 1px 1px }", "a {\n border-radius: 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 2px 3px 4px }", "a 
{\n border-radius: 0 / 1px 2px 3px 4px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 2px 1px 3px }", "a {\n border-radius: 0 / 1px 2px 1px 3px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 2px 3px 2px }", "a {\n border-radius: 0 / 1px 2px 3px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 2px 1px 2px }", "a {\n border-radius: 0 / 1px 2px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 1px 1px 1px }", "a {\n border-radius: 0 / 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px 2px; border-top-left-radius: 3px; }", "a {\n border-radius: 3px 2px 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px; border-top-left-radius: 3px; }", "a {\n border-radius: 3px 1px 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px; border-top-left-radius: 3px; }", "a {\n border-radius: 3px 0 0 / 3px 1px 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: 0/1px 2px; border-top-left-radius: 3px; }", "a {\n border-radius: 3px 0 0 / 3px 2px 1px;\n}\n") for _, x := range []string{"", "-top-left", "-top-right", "-bottom-left", "-bottom-right"} { y := "border" + x + "-radius" expectPrintedMangle(t, "a { "+y+": 1px; "+y+": 2px }", "a {\n "+y+": 2px;\n}\n") expectPrintedMangle(t, "a { "+y+": 1px !important; "+y+": 2px }", "a {\n "+y+": 1px !important;\n "+y+": 2px;\n}\n") expectPrintedMangle(t, "a { "+y+": 1px; "+y+": 2px !important }", "a {\n "+y+": 1px;\n "+y+": 2px !important;\n}\n") expectPrintedMangle(t, "a { "+y+": 1px !important; "+y+": 2px !important }", "a {\n "+y+": 2px !important;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px; "+y+": 2px !important; }", "a {\n border-radius: 1px;\n "+y+": 2px !important;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px !important; "+y+": 2px; }", "a {\n border-radius: 1px !important;\n "+y+": 2px;\n}\n") } expectPrintedMangle(t, "a { border-top-left-radius: ; border-radius: 1px }", "a {\n border-top-left-radius:;\n border-radius: 1px;\n}\n") expectPrintedMangle(t, "a { 
border-top-left-radius: 1px; border-radius: / }", "a {\n border-top-left-radius: 1px;\n border-radius: /;\n}\n") expectPrintedMangleMinify(t, "a { border-radius: 1px 2px 3px 4px; border-top-right-radius: 5px; }", "a{border-radius:1px 5px 3px 4px}") expectPrintedMangleMinify(t, "a { border-radius: 1px 2px 3px 4px; border-top-right-radius: 5px 6px; }", "a{border-radius:1px 5px 3px 4px/1px 6px 3px 4px}") // These should not be changed because "--x" and "--z" could be empty expectPrintedMangle(t, "a { border-radius: var(--x) var(--y) var(--z) var(--y) }", "a {\n border-radius: var(--x) var(--y) var(--z) var(--y);\n}\n") expectPrintedMangle(t, "a { border-radius: 0 / var(--x) var(--y) var(--z) var(--y) }", "a {\n border-radius: 0 / var(--x) var(--y) var(--z) var(--y);\n}\n") // "inherit" should not be merged expectPrintedMangle(t, "a { border-radius: 1px; border-top-left-radius: 0 }", "a {\n border-radius: 0 1px 1px;\n}\n") expectPrintedMangle(t, "a { border-radius: inherit; border-top-left-radius: 0 }", "a {\n border-radius: inherit;\n border-top-left-radius: 0;\n}\n") expectPrintedMangle(t, "a { border-radius: 0; border-top-left-radius: inherit }", "a {\n border-radius: 0;\n border-top-left-radius: inherit;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 0; border-radius: inherit }", "a {\n border-top-left-radius: 0;\n border-radius: inherit;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: inherit; border-radius: 0 }", "a {\n border-top-left-radius: inherit;\n border-radius: 0;\n}\n") // Don't merge different units expectPrintedMangle(t, "a { border-radius: 1px; border-radius: 2px; }", "a {\n border-radius: 2px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1px; border-top-left-radius: 2px; }", "a {\n border-radius: 2px 1px 1px;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 1px; border-radius: 2px; }", "a {\n border-radius: 2px;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 1px; border-top-left-radius: 2px; }", "a 
{\n border-top-left-radius: 2px;\n}\n") expectPrintedMangle(t, "a { border-radius: 1rem; border-radius: 1vw; }", "a {\n border-radius: 1rem;\n border-radius: 1vw;\n}\n") expectPrintedMangle(t, "a { border-radius: 1rem; border-top-left-radius: 1vw; }", "a {\n border-radius: 1rem;\n border-top-left-radius: 1vw;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 1rem; border-radius: 1vw; }", "a {\n border-top-left-radius: 1rem;\n border-radius: 1vw;\n}\n") expectPrintedMangle(t, "a { border-top-left-radius: 1rem; border-top-left-radius: 1vw; }", "a {\n border-top-left-radius: 1rem;\n border-top-left-radius: 1vw;\n}\n") expectPrintedMangle(t, "a { border-radius: 0; border-top-left-radius: 2px; }", "a {\n border-radius: 2px 0 0;\n}\n") expectPrintedMangle(t, "a { border-radius: 0; border-top-left-radius: 2rem; }", "a {\n border-radius: 0;\n border-top-left-radius: 2rem;\n}\n") } func TestBoxShadow(t *testing.T) { expectPrinted(t, "a { box-shadow: inset 0px 0px 0px 0px black }", "a {\n box-shadow: inset 0px 0px 0px 0px black;\n}\n") expectPrintedMangle(t, "a { box-shadow: 0px 0px 0px 0px inset black }", "a {\n box-shadow: 0 0 inset #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: 0px 0px 0px 0px black inset }", "a {\n box-shadow: 0 0 #000 inset;\n}\n") expectPrintedMangle(t, "a { box-shadow: black 0px 0px 0px 0px inset }", "a {\n box-shadow: #000 0 0 inset;\n}\n") expectPrintedMangle(t, "a { box-shadow: inset 0px 0px 0px 0px black }", "a {\n box-shadow: inset 0 0 #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: inset black 0px 0px 0px 0px }", "a {\n box-shadow: inset #000 0 0;\n}\n") expectPrintedMangle(t, "a { box-shadow: black inset 0px 0px 0px 0px }", "a {\n box-shadow: #000 inset 0 0;\n}\n") expectPrintedMangle(t, "a { box-shadow: yellow 1px 0px 0px 1px inset }", "a {\n box-shadow: #ff0 1px 0 0 1px inset;\n}\n") expectPrintedMangle(t, "a { box-shadow: yellow 1px 0px 1px 0px inset }", "a {\n box-shadow: #ff0 1px 0 1px inset;\n}\n") 
expectPrintedMangle(t, "a { box-shadow: rebeccapurple, yellow, black }", "a {\n box-shadow:\n #639,\n #ff0,\n #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: 0px 0px 0px var(--foo) black }", "a {\n box-shadow: 0 0 0 var(--foo) #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: 0px 0px 0px 0px var(--foo) black }", "a {\n box-shadow: 0 0 0 0 var(--foo) #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: calc(1px + var(--foo)) 0px 0px 0px black }", "a {\n box-shadow: calc(1px + var(--foo)) 0 0 0 #000;\n}\n") expectPrintedMangle(t, "a { box-shadow: inset 0px 0px 0px 0px 0px magenta; }", "a {\n box-shadow: inset 0 0 0 0 0 #f0f;\n}\n") expectPrintedMangleMinify(t, "a { box-shadow: rebeccapurple , yellow , black }", "a{box-shadow:#639,#ff0,#000}") expectPrintedMangleMinify(t, "a { box-shadow: rgb(255, 0, 17) 0 0 1 inset }", "a{box-shadow:#f01 0 0 1 inset}") } func TestDeduplicateRules(t *testing.T) { expectPrinted(t, "a { color: red; color: green; color: red }", "a {\n color: red;\n color: green;\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red; color: green; color: red }", "a {\n color: green;\n color: red;\n}\n") expectPrinted(t, "a { color: red } a { color: green } a { color: red }", "a {\n color: red;\n}\na {\n color: green;\n}\na {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a { color: green } a { color: red }", "a {\n color: green;\n}\na {\n color: red;\n}\n") expectPrintedMangle(t, "@media screen { a { color: red } } @media screen { a { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a { color: red } } @media screen { & a { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n@media screen {\n & a {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a { color: red } } @media screen { a[x] { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n@media screen {\n a[x] {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a { color: 
red } } @media screen { a.x { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n@media screen {\n a.x {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a { color: red } } @media screen { a#x { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n@media screen {\n a#x {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a { color: red } } @media screen { a:x { color: red } }", "@media screen {\n a {\n color: red;\n }\n}\n@media screen {\n a:x {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a:x { color: red } } @media screen { a:x(y) { color: red } }", "@media screen {\n a:x {\n color: red;\n }\n}\n@media screen {\n a:x(y) {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@media screen { a b { color: red } } @media screen { a + b { color: red } }", "@media screen {\n a b {\n color: red;\n }\n}\n@media screen {\n a + b {\n color: red;\n }\n}\n") } func TestMangleTime(t *testing.T) { expectPrintedMangle(t, "a { animation: b 1s }", "a {\n animation: b 1s;\n}\n") expectPrintedMangle(t, "a { animation: b 1.s }", "a {\n animation: b 1s;\n}\n") expectPrintedMangle(t, "a { animation: b 1.0s }", "a {\n animation: b 1s;\n}\n") expectPrintedMangle(t, "a { animation: b 1.02s }", "a {\n animation: b 1.02s;\n}\n") expectPrintedMangle(t, "a { animation: b .1s }", "a {\n animation: b .1s;\n}\n") expectPrintedMangle(t, "a { animation: b .01s }", "a {\n animation: b .01s;\n}\n") expectPrintedMangle(t, "a { animation: b .001s }", "a {\n animation: b 1ms;\n}\n") expectPrintedMangle(t, "a { animation: b .0012s }", "a {\n animation: b 1.2ms;\n}\n") expectPrintedMangle(t, "a { animation: b -.001s }", "a {\n animation: b -1ms;\n}\n") expectPrintedMangle(t, "a { animation: b -.0012s }", "a {\n animation: b -1.2ms;\n}\n") expectPrintedMangle(t, "a { animation: b .0001s }", "a {\n animation: b .1ms;\n}\n") expectPrintedMangle(t, "a { animation: b .00012s }", "a {\n animation: b .12ms;\n}\n") expectPrintedMangle(t, "a { 
animation: b .000123s }", "a {\n animation: b .123ms;\n}\n") expectPrintedMangle(t, "a { animation: b .01S }", "a {\n animation: b .01S;\n}\n") expectPrintedMangle(t, "a { animation: b .001S }", "a {\n animation: b 1ms;\n}\n") expectPrintedMangle(t, "a { animation: b 1ms }", "a {\n animation: b 1ms;\n}\n") expectPrintedMangle(t, "a { animation: b 10ms }", "a {\n animation: b 10ms;\n}\n") expectPrintedMangle(t, "a { animation: b 100ms }", "a {\n animation: b .1s;\n}\n") expectPrintedMangle(t, "a { animation: b 120ms }", "a {\n animation: b .12s;\n}\n") expectPrintedMangle(t, "a { animation: b 123ms }", "a {\n animation: b 123ms;\n}\n") expectPrintedMangle(t, "a { animation: b 1000ms }", "a {\n animation: b 1s;\n}\n") expectPrintedMangle(t, "a { animation: b 1200ms }", "a {\n animation: b 1.2s;\n}\n") expectPrintedMangle(t, "a { animation: b 1230ms }", "a {\n animation: b 1.23s;\n}\n") expectPrintedMangle(t, "a { animation: b 1234ms }", "a {\n animation: b 1234ms;\n}\n") expectPrintedMangle(t, "a { animation: b -100ms }", "a {\n animation: b -.1s;\n}\n") expectPrintedMangle(t, "a { animation: b -120ms }", "a {\n animation: b -.12s;\n}\n") expectPrintedMangle(t, "a { animation: b 120mS }", "a {\n animation: b .12s;\n}\n") expectPrintedMangle(t, "a { animation: b 120Ms }", "a {\n animation: b .12s;\n}\n") expectPrintedMangle(t, "a { animation: b 123mS }", "a {\n animation: b 123mS;\n}\n") expectPrintedMangle(t, "a { animation: b 123Ms }", "a {\n animation: b 123Ms;\n}\n") // Mangling times with exponents is not currently supported expectPrintedMangle(t, "a { animation: b 1e3ms }", "a {\n animation: b 1e3ms;\n}\n") expectPrintedMangle(t, "a { animation: b 1E3ms }", "a {\n animation: b 1E3ms;\n}\n") } func TestCalc(t *testing.T) { expectParseError(t, "a { b: calc(+(2)) }", "<stdin>: WARNING: \"+\" can only be used as an infix operator, not a prefix operator\n") expectParseError(t, "a { b: calc(-(2)) }", "<stdin>: WARNING: \"-\" can only be used as an infix operator, not 
a prefix operator\n") expectParseError(t, "a { b: calc(*(2)) }", "") expectParseError(t, "a { b: calc(/(2)) }", "") expectParseError(t, "a { b: calc(1 + 2) }", "") expectParseError(t, "a { b: calc(1 - 2) }", "") expectParseError(t, "a { b: calc(1 * 2) }", "") expectParseError(t, "a { b: calc(1 / 2) }", "") expectParseError(t, "a { b: calc(1+ 2) }", "<stdin>: WARNING: The \"+\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1- 2) }", "<stdin>: WARNING: The \"-\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1* 2) }", "") expectParseError(t, "a { b: calc(1/ 2) }", "") expectParseError(t, "a { b: calc(1 +2) }", "<stdin>: WARNING: The \"+\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1 -2) }", "<stdin>: WARNING: The \"-\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1 *2) }", "") expectParseError(t, "a { b: calc(1 /2) }", "") expectParseError(t, "a { b: calc(1 +(2)) }", "<stdin>: WARNING: The \"+\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1 -(2)) }", "<stdin>: WARNING: The \"-\" operator only works if there is whitespace on both sides\n") expectParseError(t, "a { b: calc(1 *(2)) }", "") expectParseError(t, "a { b: calc(1 /(2)) }", "") } func TestMinifyCalc(t *testing.T) { expectPrintedMangleMinify(t, "a { b: calc(x + y) }", "a{b:calc(x + y)}") expectPrintedMangleMinify(t, "a { b: calc(x - y) }", "a{b:calc(x - y)}") expectPrintedMangleMinify(t, "a { b: calc(x * y) }", "a{b:calc(x*y)}") expectPrintedMangleMinify(t, "a { b: calc(x / y) }", "a{b:calc(x/y)}") } func TestMangleCalc(t *testing.T) { expectPrintedMangle(t, "a { b: calc(1) }", "a {\n b: 1;\n}\n") expectPrintedMangle(t, "a { b: calc((1)) }", "a {\n b: 1;\n}\n") expectPrintedMangle(t, "a { b: calc(calc(1)) }", "a {\n b: 1;\n}\n") expectPrintedMangle(t, "a { b: calc(x + y 
* z) }", "a {\n b: calc(x + y * z);\n}\n") expectPrintedMangle(t, "a { b: calc(x * y + z) }", "a {\n b: calc(x * y + z);\n}\n") // Test sum expectPrintedMangle(t, "a { b: calc(2 + 3) }", "a {\n b: 5;\n}\n") expectPrintedMangle(t, "a { b: calc(6 - 2) }", "a {\n b: 4;\n}\n") // Test product expectPrintedMangle(t, "a { b: calc(2 * 3) }", "a {\n b: 6;\n}\n") expectPrintedMangle(t, "a { b: calc(6 / 2) }", "a {\n b: 3;\n}\n") expectPrintedMangle(t, "a { b: calc(2px * 3 + 4px * 5) }", "a {\n b: 26px;\n}\n") expectPrintedMangle(t, "a { b: calc(2 * 3px + 4 * 5px) }", "a {\n b: 26px;\n}\n") expectPrintedMangle(t, "a { b: calc(2px * 3 - 4px * 5) }", "a {\n b: -14px;\n}\n") expectPrintedMangle(t, "a { b: calc(2 * 3px - 4 * 5px) }", "a {\n b: -14px;\n}\n") // Test negation expectPrintedMangle(t, "a { b: calc(x + 1) }", "a {\n b: calc(x + 1);\n}\n") expectPrintedMangle(t, "a { b: calc(x - 1) }", "a {\n b: calc(x - 1);\n}\n") expectPrintedMangle(t, "a { b: calc(x + -1) }", "a {\n b: calc(x - 1);\n}\n") expectPrintedMangle(t, "a { b: calc(x - -1) }", "a {\n b: calc(x + 1);\n}\n") expectPrintedMangle(t, "a { b: calc(1 + x) }", "a {\n b: calc(1 + x);\n}\n") expectPrintedMangle(t, "a { b: calc(1 - x) }", "a {\n b: calc(1 - x);\n}\n") expectPrintedMangle(t, "a { b: calc(-1 + x) }", "a {\n b: calc(-1 + x);\n}\n") expectPrintedMangle(t, "a { b: calc(-1 - x) }", "a {\n b: calc(-1 - x);\n}\n") // Test inversion expectPrintedMangle(t, "a { b: calc(x * 4) }", "a {\n b: calc(x * 4);\n}\n") expectPrintedMangle(t, "a { b: calc(x / 4) }", "a {\n b: calc(x / 4);\n}\n") expectPrintedMangle(t, "a { b: calc(x * 0.25) }", "a {\n b: calc(x / 4);\n}\n") expectPrintedMangle(t, "a { b: calc(x / 0.25) }", "a {\n b: calc(x * 4);\n}\n") // Test operator precedence expectPrintedMangle(t, "a { b: calc((a + b) + c) }", "a {\n b: calc(a + b + c);\n}\n") expectPrintedMangle(t, "a { b: calc(a + (b + c)) }", "a {\n b: calc(a + b + c);\n}\n") expectPrintedMangle(t, "a { b: calc((a - b) - c) }", "a {\n b: calc(a - 
b - c);\n}\n") expectPrintedMangle(t, "a { b: calc(a - (b - c)) }", "a {\n b: calc(a - (b - c));\n}\n") expectPrintedMangle(t, "a { b: calc((a * b) * c) }", "a {\n b: calc(a * b * c);\n}\n") expectPrintedMangle(t, "a { b: calc(a * (b * c)) }", "a {\n b: calc(a * b * c);\n}\n") expectPrintedMangle(t, "a { b: calc((a / b) / c) }", "a {\n b: calc(a / b / c);\n}\n") expectPrintedMangle(t, "a { b: calc(a / (b / c)) }", "a {\n b: calc(a / (b / c));\n}\n") expectPrintedMangle(t, "a { b: calc(a + b * c / d - e) }", "a {\n b: calc(a + b * c / d - e);\n}\n") expectPrintedMangle(t, "a { b: calc((a + ((b * c) / d)) - e) }", "a {\n b: calc(a + b * c / d - e);\n}\n") expectPrintedMangle(t, "a { b: calc((a + b) * c / (d - e)) }", "a {\n b: calc((a + b) * c / (d - e));\n}\n") // Using "var()" should bail because it can expand to any number of tokens expectPrintedMangle(t, "a { b: calc(1px - x + 2px) }", "a {\n b: calc(3px - x);\n}\n") expectPrintedMangle(t, "a { b: calc(1px - var(x) + 2px) }", "a {\n b: calc(1px - var(x) + 2px);\n}\n") // Test values that can't be accurately represented as decimals expectPrintedMangle(t, "a { b: calc(100% / 1) }", "a {\n b: 100%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 2) }", "a {\n b: 50%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 3) }", "a {\n b: calc(100% / 3);\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 4) }", "a {\n b: 25%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 5) }", "a {\n b: 20%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 6) }", "a {\n b: calc(100% / 6);\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 7) }", "a {\n b: calc(100% / 7);\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 8) }", "a {\n b: 12.5%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 9) }", "a {\n b: calc(100% / 9);\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 10) }", "a {\n b: 10%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 100) }", "a {\n b: 1%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 1000) }", "a 
{\n b: .1%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 10000) }", "a {\n b: .01%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 100000) }", "a {\n b: .001%;\n}\n") expectPrintedMangle(t, "a { b: calc(100% / 1000000) }", "a {\n b: calc(100% / 1000000);\n}\n") // This actually ends up as "100% * (1 / 1000000)" which is less precise expectPrintedMangle(t, "a { b: calc(100% / -1000000) }", "a {\n b: calc(100% / -1000000);\n}\n") expectPrintedMangle(t, "a { b: calc(100% / -100000) }", "a {\n b: -.001%;\n}\n") expectPrintedMangle(t, "a { b: calc(3 * (2px + 1em / 7)) }", "a {\n b: calc(3 * (2px + 1em / 7));\n}\n") expectPrintedMangle(t, "a { b: calc(3 * (2px + 1em / 8)) }", "a {\n b: calc(3 * (2px + .125em));\n}\n") // Non-finite numbers expectPrintedMangle(t, "a { b: calc(0px / 0) }", "a {\n b: calc(0px / 0);\n}\n") expectPrintedMangle(t, "a { b: calc(1px / 0) }", "a {\n b: calc(1px / 0);\n}\n") expectPrintedMangle(t, "a { b: calc(-1px / 0) }", "a {\n b: calc(-1px / 0);\n}\n") expectPrintedMangle(t, "a { b: calc(nan) }", "a {\n b: calc(nan);\n}\n") expectPrintedMangle(t, "a { b: calc(infinity) }", "a {\n b: calc(infinity);\n}\n") expectPrintedMangle(t, "a { b: calc(-infinity) }", "a {\n b: calc(-infinity);\n}\n") expectPrintedMangle(t, "a { b: calc(1px / nan) }", "a {\n b: calc(1px / nan);\n}\n") expectPrintedMangle(t, "a { b: calc(1px / infinity) }", "a {\n b: 0px;\n}\n") expectPrintedMangle(t, "a { b: calc(1px / -infinity) }", "a {\n b: -0px;\n}\n") } func TestTransform(t *testing.T) { expectPrintedMangle(t, "a { transform: matrix(1, 0, 0, 1, 0, 0) }", "a {\n transform: scale(1);\n}\n") expectPrintedMangle(t, "a { transform: matrix(2, 0, 0, 1, 0, 0) }", "a {\n transform: scaleX(2);\n}\n") expectPrintedMangle(t, "a { transform: matrix(1, 0, 0, 2, 0, 0) }", "a {\n transform: scaleY(2);\n}\n") expectPrintedMangle(t, "a { transform: matrix(2, 0, 0, 3, 0, 0) }", "a {\n transform: scale(2, 3);\n}\n") expectPrintedMangle(t, "a { transform: matrix(2, 0, 0, 2, 0, 0) 
}", "a {\n transform: scale(2);\n}\n") expectPrintedMangle(t, "a { transform: matrix(1, 0, 0, 1, 1, 2) }", "a {\n transform: matrix(1, 0, 0, 1, 1, 2);\n}\n") expectPrintedMangle(t, "a { transform: translate(0, 0) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translate(0px, 0px) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translate(0%, 0%) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translate(1px, 0) }", "a {\n transform: translate(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(1px, 0px) }", "a {\n transform: translate(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(1px, 0%) }", "a {\n transform: translate(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(0, 1px) }", "a {\n transform: translateY(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(0px, 1px) }", "a {\n transform: translateY(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(0%, 1px) }", "a {\n transform: translateY(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate(1px, 2px) }", "a {\n transform: translate(1px, 2px);\n}\n") expectPrintedMangle(t, "a { transform: translate(40%, 60%) }", "a {\n transform: translate(40%, 60%);\n}\n") expectPrintedMangle(t, "a { transform: translateX(0) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translateX(0px) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translateX(0%) }", "a {\n transform: translate(0);\n}\n") expectPrintedMangle(t, "a { transform: translateX(1px) }", "a {\n transform: translate(1px);\n}\n") expectPrintedMangle(t, "a { transform: translateX(50%) }", "a {\n transform: translate(50%);\n}\n") expectPrintedMangle(t, "a { transform: translateY(0) }", "a {\n transform: translateY(0);\n}\n") expectPrintedMangle(t, "a { transform: translateY(0px) }", "a {\n transform: translateY(0);\n}\n") 
expectPrintedMangle(t, "a { transform: translateY(0%) }", "a {\n transform: translateY(0);\n}\n") expectPrintedMangle(t, "a { transform: translateY(1px) }", "a {\n transform: translateY(1px);\n}\n") expectPrintedMangle(t, "a { transform: translateY(50%) }", "a {\n transform: translateY(50%);\n}\n") expectPrintedMangle(t, "a { transform: scale(1) }", "a {\n transform: scale(1);\n}\n") expectPrintedMangle(t, "a { transform: scale(100%) }", "a {\n transform: scale(1);\n}\n") expectPrintedMangle(t, "a { transform: scale(10%) }", "a {\n transform: scale(.1);\n}\n") expectPrintedMangle(t, "a { transform: scale(99%) }", "a {\n transform: scale(99%);\n}\n") expectPrintedMangle(t, "a { transform: scale(1, 1) }", "a {\n transform: scale(1);\n}\n") expectPrintedMangle(t, "a { transform: scale(100%, 1) }", "a {\n transform: scale(1);\n}\n") expectPrintedMangle(t, "a { transform: scale(10%, 0.1) }", "a {\n transform: scale(.1);\n}\n") expectPrintedMangle(t, "a { transform: scale(99%, 0.99) }", "a {\n transform: scale(99%, .99);\n}\n") expectPrintedMangle(t, "a { transform: scale(60%, 40%) }", "a {\n transform: scale(.6, .4);\n}\n") expectPrintedMangle(t, "a { transform: scale(3, 1) }", "a {\n transform: scaleX(3);\n}\n") expectPrintedMangle(t, "a { transform: scale(300%, 1) }", "a {\n transform: scaleX(3);\n}\n") expectPrintedMangle(t, "a { transform: scale(1, 3) }", "a {\n transform: scaleY(3);\n}\n") expectPrintedMangle(t, "a { transform: scale(1, 300%) }", "a {\n transform: scaleY(3);\n}\n") expectPrintedMangle(t, "a { transform: scaleX(1) }", "a {\n transform: scaleX(1);\n}\n") expectPrintedMangle(t, "a { transform: scaleX(2) }", "a {\n transform: scaleX(2);\n}\n") expectPrintedMangle(t, "a { transform: scaleX(300%) }", "a {\n transform: scaleX(3);\n}\n") expectPrintedMangle(t, "a { transform: scaleX(99%) }", "a {\n transform: scaleX(99%);\n}\n") expectPrintedMangle(t, "a { transform: scaleY(1) }", "a {\n transform: scaleY(1);\n}\n") expectPrintedMangle(t, "a { transform: 
scaleY(2) }", "a {\n transform: scaleY(2);\n}\n") expectPrintedMangle(t, "a { transform: scaleY(300%) }", "a {\n transform: scaleY(3);\n}\n") expectPrintedMangle(t, "a { transform: scaleY(99%) }", "a {\n transform: scaleY(99%);\n}\n") expectPrintedMangle(t, "a { transform: rotate(0) }", "a {\n transform: rotate(0);\n}\n") expectPrintedMangle(t, "a { transform: rotate(0deg) }", "a {\n transform: rotate(0);\n}\n") expectPrintedMangle(t, "a { transform: rotate(1deg) }", "a {\n transform: rotate(1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(0) }", "a {\n transform: skew(0);\n}\n") expectPrintedMangle(t, "a { transform: skew(0deg) }", "a {\n transform: skew(0);\n}\n") expectPrintedMangle(t, "a { transform: skew(1deg) }", "a {\n transform: skew(1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(1deg, 0) }", "a {\n transform: skew(1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(1deg, 0deg) }", "a {\n transform: skew(1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(0, 1deg) }", "a {\n transform: skew(0, 1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(0deg, 1deg) }", "a {\n transform: skew(0, 1deg);\n}\n") expectPrintedMangle(t, "a { transform: skew(1deg, 2deg) }", "a {\n transform: skew(1deg, 2deg);\n}\n") expectPrintedMangle(t, "a { transform: skewX(0) }", "a {\n transform: skew(0);\n}\n") expectPrintedMangle(t, "a { transform: skewX(0deg) }", "a {\n transform: skew(0);\n}\n") expectPrintedMangle(t, "a { transform: skewX(1deg) }", "a {\n transform: skew(1deg);\n}\n") expectPrintedMangle(t, "a { transform: skewY(0) }", "a {\n transform: skewY(0);\n}\n") expectPrintedMangle(t, "a { transform: skewY(0deg) }", "a {\n transform: skewY(0);\n}\n") expectPrintedMangle(t, "a { transform: skewY(1deg) }", "a {\n transform: skewY(1deg);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2) }", "a {\n transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2);\n}\n") 
expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 2, 3, 4, 1) }", "a {\n transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 2, 3, 4, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: matrix3d(1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: scaleZ(1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(2, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: scale3d(2, 1, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: scale3d(1, 2, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: scale3d(2, 2, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1) }", "a {\n transform: scale3d(2, 3, 1);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1) }", "a {\n transform: scaleZ(2);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1) }", "a {\n transform: scale3d(1, 2, 3);\n}\n") expectPrintedMangle(t, "a { transform: matrix3d(2, 3, 0, 0, 4, 5, 0, 0, 0, 0, 1, 0, 6, 7, 0, 1) }", "a {\n transform: matrix3d(2, 3, 0, 0, 4, 5, 0, 0, 0, 0, 1, 0, 6, 7, 0, 1);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(0, 0, 0) }", "a {\n transform: translateZ(0);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(0%, 0%, 0) }", "a {\n transform: translateZ(0);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(0px, 0px, 0px) }", "a {\n transform: translateZ(0);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(1px, 0px, 0px) }", "a {\n transform: translate3d(1px, 0, 0);\n}\n") expectPrintedMangle(t, 
"a { transform: translate3d(0px, 1px, 0px) }", "a {\n transform: translate3d(0, 1px, 0);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(0px, 0px, 1px) }", "a {\n transform: translateZ(1px);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(1px, 2px, 3px) }", "a {\n transform: translate3d(1px, 2px, 3px);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(1px, 0, 3px) }", "a {\n transform: translate3d(1px, 0, 3px);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(0, 2px, 3px) }", "a {\n transform: translate3d(0, 2px, 3px);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(1px, 2px, 0px) }", "a {\n transform: translate3d(1px, 2px, 0);\n}\n") expectPrintedMangle(t, "a { transform: translate3d(40%, 60%, 0px) }", "a {\n transform: translate3d(40%, 60%, 0);\n}\n") expectPrintedMangle(t, "a { transform: translateZ(0) }", "a {\n transform: translateZ(0);\n}\n") expectPrintedMangle(t, "a { transform: translateZ(0px) }", "a {\n transform: translateZ(0);\n}\n") expectPrintedMangle(t, "a { transform: translateZ(1px) }", "a {\n transform: translateZ(1px);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(1, 1, 1) }", "a {\n transform: scaleZ(1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(2, 1, 1) }", "a {\n transform: scale3d(2, 1, 1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(1, 2, 1) }", "a {\n transform: scale3d(1, 2, 1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(1, 1, 2) }", "a {\n transform: scaleZ(2);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(1, 2, 3) }", "a {\n transform: scale3d(1, 2, 3);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(2, 3, 1) }", "a {\n transform: scale3d(2, 3, 1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(2, 2, 1) }", "a {\n transform: scale3d(2, 2, 1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(3, 300%, 100.00%) }", "a {\n transform: scale3d(3, 3, 1);\n}\n") expectPrintedMangle(t, "a { transform: scale3d(1%, 2%, 3%) }", "a {\n 
transform: scale3d(1%, 2%, 3%);\n}\n") expectPrintedMangle(t, "a { transform: scaleZ(1) }", "a {\n transform: scaleZ(1);\n}\n") expectPrintedMangle(t, "a { transform: scaleZ(100%) }", "a {\n transform: scaleZ(1);\n}\n") expectPrintedMangle(t, "a { transform: scaleZ(2) }", "a {\n transform: scaleZ(2);\n}\n") expectPrintedMangle(t, "a { transform: scaleZ(200%) }", "a {\n transform: scaleZ(2);\n}\n") expectPrintedMangle(t, "a { transform: scaleZ(99%) }", "a {\n transform: scaleZ(99%);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(0, 0, 0, 0) }", "a {\n transform: rotate3d(0, 0, 0, 0);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(0, 0, 0, 0deg) }", "a {\n transform: rotate3d(0, 0, 0, 0);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(0, 0, 0, 45deg) }", "a {\n transform: rotate3d(0, 0, 0, 45deg);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(1, 0, 0, 45deg) }", "a {\n transform: rotateX(45deg);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(0, 1, 0, 45deg) }", "a {\n transform: rotateY(45deg);\n}\n") expectPrintedMangle(t, "a { transform: rotate3d(0, 0, 1, 45deg) }", "a {\n transform: rotate3d(0, 0, 1, 45deg);\n}\n") expectPrintedMangle(t, "a { transform: rotateX(0) }", "a {\n transform: rotateX(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateX(0deg) }", "a {\n transform: rotateX(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateX(1deg) }", "a {\n transform: rotateX(1deg);\n}\n") expectPrintedMangle(t, "a { transform: rotateY(0) }", "a {\n transform: rotateY(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateY(0deg) }", "a {\n transform: rotateY(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateY(1deg) }", "a {\n transform: rotateY(1deg);\n}\n") expectPrintedMangle(t, "a { transform: rotateZ(0) }", "a {\n transform: rotate(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateZ(0deg) }", "a {\n transform: rotate(0);\n}\n") expectPrintedMangle(t, "a { transform: rotateZ(1deg) }", "a {\n transform: 
rotate(1deg);\n}\n") expectPrintedMangle(t, "a { transform: perspective(0) }", "a {\n transform: perspective(0);\n}\n") expectPrintedMangle(t, "a { transform: perspective(0px) }", "a {\n transform: perspective(0);\n}\n") expectPrintedMangle(t, "a { transform: perspective(1px) }", "a {\n transform: perspective(1px);\n}\n") } func TestMangleAlpha(t *testing.T) { alphas := []string{ "0", ".004", ".008", ".01", ".016", ".02", ".024", ".027", ".03", ".035", ".04", ".043", ".047", ".05", ".055", ".06", ".063", ".067", ".07", ".075", ".08", ".082", ".086", ".09", ".094", ".098", ".1", ".106", ".11", ".114", ".118", ".12", ".125", ".13", ".133", ".137", ".14", ".145", ".15", ".153", ".157", ".16", ".165", ".17", ".173", ".176", ".18", ".184", ".19", ".192", ".196", ".2", ".204", ".208", ".21", ".216", ".22", ".224", ".227", ".23", ".235", ".24", ".243", ".247", ".25", ".255", ".26", ".263", ".267", ".27", ".275", ".28", ".282", ".286", ".29", ".294", ".298", ".3", ".306", ".31", ".314", ".318", ".32", ".325", ".33", ".333", ".337", ".34", ".345", ".35", ".353", ".357", ".36", ".365", ".37", ".373", ".376", ".38", ".384", ".39", ".392", ".396", ".4", ".404", ".408", ".41", ".416", ".42", ".424", ".427", ".43", ".435", ".44", ".443", ".447", ".45", ".455", ".46", ".463", ".467", ".47", ".475", ".48", ".482", ".486", ".49", ".494", ".498", ".5", ".506", ".51", ".514", ".518", ".52", ".525", ".53", ".533", ".537", ".54", ".545", ".55", ".553", ".557", ".56", ".565", ".57", ".573", ".576", ".58", ".584", ".59", ".592", ".596", ".6", ".604", ".608", ".61", ".616", ".62", ".624", ".627", ".63", ".635", ".64", ".643", ".647", ".65", ".655", ".66", ".663", ".667", ".67", ".675", ".68", ".682", ".686", ".69", ".694", ".698", ".7", ".706", ".71", ".714", ".718", ".72", ".725", ".73", ".733", ".737", ".74", ".745", ".75", ".753", ".757", ".76", ".765", ".77", ".773", ".776", ".78", ".784", ".79", ".792", ".796", ".8", ".804", ".808", ".81", ".816", ".82", ".824", ".827", ".83", 
".835", ".84", ".843", ".847", ".85", ".855", ".86", ".863", ".867", ".87", ".875", ".88", ".882", ".886", ".89", ".894", ".898", ".9", ".906", ".91", ".914", ".918", ".92", ".925", ".93", ".933", ".937", ".94", ".945", ".95", ".953", ".957", ".96", ".965", ".97", ".973", ".976", ".98", ".984", ".99", ".992", ".996", } for i, alpha := range alphas { expectPrintedLowerMangle(t, fmt.Sprintf("a { color: #%08X }", i), "a {\n color: rgba(0, 0, 0, "+alpha+");\n}\n") } // An alpha value of 100% does not use "rgba(...)" expectPrintedLowerMangle(t, "a { color: #000000FF }", "a {\n color: #000;\n}\n") } func TestMangleDuplicateSelectorRules(t *testing.T) { expectPrinted(t, "a { color: red } b { color: red }", "a {\n color: red;\n}\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } b { color: red }", "a,\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } div {} b { color: red }", "a,\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } div { color: red } b { color: red }", "a,\ndiv,\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } div { color: red } a { color: red }", "a,\ndiv {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } div { color: blue } b { color: red }", "a {\n color: red;\n}\ndiv {\n color: #00f;\n}\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } div { color: blue } a { color: red }", "div {\n color: #00f;\n}\na {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red; color: red } b { color: red }", "a,\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } b { color: red; color: red }", "a,\nb {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } b { color: blue }", "a {\n color: red;\n}\nb {\n color: #00f;\n}\n") // Do not merge duplicates if they are "unsafe" expectPrintedMangle(t, "a { color: red } unknown { color: red }", "a {\n color: red;\n}\nunknown {\n color: red;\n}\n") expectPrintedMangle(t, "unknown { color: red } a { color: red 
}", "unknown {\n color: red;\n}\na {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } video { color: red }", "a {\n color: red;\n}\nvideo {\n color: red;\n}\n") expectPrintedMangle(t, "video { color: red } a { color: red }", "video {\n color: red;\n}\na {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a:last-child { color: red }", "a {\n color: red;\n}\na:last-child {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a[b=c i] { color: red }", "a {\n color: red;\n}\na[b=c i] {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } & { color: red }", "a {\n color: red;\n}\n& {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a + b { color: red }", "a {\n color: red;\n}\na + b {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a|b { color: red }", "a {\n color: red;\n}\na|b {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a::hover { color: red }", "a {\n color: red;\n}\na::hover {\n color: red;\n}\n") // Still merge duplicates if they are "safe" expectPrintedMangle(t, "a { color: red } a:hover { color: red }", "a,\na:hover {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a[b=c] { color: red }", "a,\na[b=c] {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a#id { color: red }", "a,\na#id {\n color: red;\n}\n") expectPrintedMangle(t, "a { color: red } a.cls { color: red }", "a,\na.cls {\n color: red;\n}\n") // Skip over comments expectPrintedMangle(t, "c { color: green } a { color: red } /*!x*/ /*!y*/ b { color: blue }", "c {\n color: green;\n}\na {\n color: red;\n}\n/*!x*/\n/*!y*/\nb {\n color: #00f;\n}\n") expectPrintedMangle(t, "c { color: green } a { color: red } /*!x*/ /*!y*/ b { color: red }", "c {\n color: green;\n}\na,\nb {\n color: red;\n}\n/*!x*/\n/*!y*/\n") expectPrintedMangle(t, "c { color: green } a { color: red } /*!x*/ /*!y*/ a { color: red }", "c {\n color: green;\n}\na {\n color: red;\n}\n/*!x*/\n/*!y*/\n") } func TestFontWeight(t 
*testing.T) { expectPrintedMangle(t, "a { font-weight: normal }", "a {\n font-weight: 400;\n}\n") expectPrintedMangle(t, "a { font-weight: bold }", "a {\n font-weight: 700;\n}\n") expectPrintedMangle(t, "a { font-weight: 400 }", "a {\n font-weight: 400;\n}\n") expectPrintedMangle(t, "a { font-weight: bolder }", "a {\n font-weight: bolder;\n}\n") expectPrintedMangle(t, "a { font-weight: var(--var) }", "a {\n font-weight: var(--var);\n}\n") expectPrintedMangleMinify(t, "a { font-weight: normal }", "a{font-weight:400}") } func TestFontFamily(t *testing.T) { expectPrintedMangle(t, "a {font-family: aaa }", "a {\n font-family: aaa;\n}\n") expectPrintedMangle(t, "a {font-family: serif }", "a {\n font-family: serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'serif' }", "a {\n font-family: \"serif\";\n}\n") expectPrintedMangle(t, "a {font-family: aaa bbb, serif }", "a {\n font-family: aaa bbb, serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa', serif }", "a {\n font-family: aaa, serif;\n}\n") expectPrintedMangle(t, "a {font-family: '\"', serif }", "a {\n font-family: '\"', serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa ', serif }", "a {\n font-family: \"aaa \", serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', serif }", "a {\n font-family: aaa bbb, serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', 'ccc ddd' }", "a {\n font-family: aaa bbb, ccc ddd;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', serif }", "a {\n font-family: \"aaa bbb\", serif;\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa serif' }", "a {\n font-family: \"aaa serif\";\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', var(--var) }", "a {\n font-family: \"aaa bbb\", var(--var);\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', }", "a {\n font-family: \"aaa bbb\", ;\n}\n") expectPrintedMangle(t, "a {font-family: , 'aaa bbb' }", "a {\n font-family: , \"aaa bbb\";\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa',, 'bbb' 
}", "a {\n font-family:\n \"aaa\",,\n \"bbb\";\n}\n") expectPrintedMangle(t, "a {font-family: 'aaa bbb', x serif }", "a {\n font-family: \"aaa bbb\", x serif;\n}\n") expectPrintedMangleMinify(t, "a {font-family: 'aaa bbb', serif }", "a{font-family:aaa bbb,serif}") expectPrintedMangleMinify(t, "a {font-family: 'aaa bbb', 'ccc ddd' }", "a{font-family:aaa bbb,ccc ddd}") } func TestFont(t *testing.T) { expectPrintedMangle(t, "a { font: caption }", "a {\n font: caption;\n}\n") expectPrintedMangle(t, "a { font: normal 1px }", "a {\n font: normal 1px;\n}\n") expectPrintedMangle(t, "a { font: normal bold }", "a {\n font: normal bold;\n}\n") expectPrintedMangle(t, "a { font: 1rem 'aaa bbb' }", "a {\n font: 1rem aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: 1rem/1.2 'aaa bbb' }", "a {\n font: 1rem/1.2 aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: normal 1rem 'aaa bbb' }", "a {\n font: 1rem aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: normal 1rem 'aaa bbb', serif }", "a {\n font: 1rem aaa bbb, serif;\n}\n") expectPrintedMangle(t, "a { font: italic small-caps bold ultra-condensed 1rem/1.2 'aaa bbb' }", "a {\n font: italic small-caps 700 ultra-condensed 1rem/1.2 aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: oblique 1px 'aaa bbb' }", "a {\n font: oblique 1px aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: oblique 45deg 1px 'aaa bbb' }", "a {\n font: oblique 45deg 1px aaa bbb;\n}\n") expectPrintedMangle(t, "a { font: var(--var) 'aaa bbb' }", "a {\n font: var(--var) \"aaa bbb\";\n}\n") expectPrintedMangle(t, "a { font: normal var(--var) 'aaa bbb' }", "a {\n font: normal var(--var) \"aaa bbb\";\n}\n") expectPrintedMangle(t, "a { font: normal 1rem var(--var), 'aaa bbb' }", "a {\n font: normal 1rem var(--var), \"aaa bbb\";\n}\n") expectPrintedMangleMinify(t, "a { font: italic small-caps bold ultra-condensed 1rem/1.2 'aaa bbb' }", "a{font:italic small-caps 700 ultra-condensed 1rem/1.2 aaa bbb}") expectPrintedMangleMinify(t, "a { font: italic small-caps bold 
ultra-condensed 1rem / 1.2 'aaa bbb' }", "a{font:italic small-caps 700 ultra-condensed 1rem/1.2 aaa bbb}") } func TestWarningUnexpectedCloseBrace(t *testing.T) { expectParseError(t, ".red {\n color: red;\n}\n}\n.blue {\n color: blue;\n}\n.green {\n color: green;\n}\n", `<stdin>: WARNING: Unexpected "}" `) expectPrinted(t, ".red {\n color: red;\n}\n}\n.blue {\n color: blue;\n}\n.green {\n color: green;\n}\n", `.red { color: red; } } .blue { color: blue; } .green { color: green; } `) } func TestPropertyTypoWarning(t *testing.T) { expectParseError(t, "a { z-idnex: 0 }", "<stdin>: WARNING: \"z-idnex\" is not a known CSS property\nNOTE: Did you mean \"z-index\" instead?\n") expectParseError(t, "a { x-index: 0 }", "<stdin>: WARNING: \"x-index\" is not a known CSS property\nNOTE: Did you mean \"z-index\" instead?\n") // CSS variables should not be corrected expectParseError(t, "a { --index: 0 }", "") // Short names should not be corrected ("alt" is actually valid in WebKit, and should not become "all") expectParseError(t, "a { alt: \"\" }", "") } ================================================ FILE: lib/esbuild/css_parser/css_reduce_calc.go ================================================ package css_parser import ( "fmt" "math" "strconv" "strings" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" ) func (p *parser) tryToReduceCalcExpression(token css_ast.Token) css_ast.Token { if term := tryToParseCalcTerm(*token.Children); term != nil { whitespace := css_ast.WhitespaceBefore | css_ast.WhitespaceAfter if p.options.MinifyWhitespace { whitespace = 0 } term = term.partiallySimplify() if result, ok := term.convertToToken(whitespace); ok { if result.Kind == css_lexer.TOpenParen { result.Kind = css_lexer.TFunction result.Text = "calc" } return result } } return token } // See: https://www.w3.org/TR/css-values-4/#calc-internal type calcTerm interface { convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, 
bool) partiallySimplify() calcTerm }

// calcSum is an n-ary sum node ("+"); subtraction is represented by wrapping
// the subtracted term in a calcNegate.
type calcSum struct {
	terms []calcTerm
}

// calcProduct is an n-ary product node ("*"); division is represented by
// wrapping the divisor in a calcInvert.
type calcProduct struct {
	terms []calcTerm
}

// calcNegate represents "0 - term" (unary minus).
type calcNegate struct {
	term calcTerm
}

// calcInvert represents "1 / term" (the reciprocal of its child).
type calcInvert struct {
	term calcTerm
}

// calcNumeric is a literal value: a plain number (unit == ""), a percentage
// (unit == "%"), or a dimension such as "px" or "em".
type calcNumeric struct {
	unit   string
	number float64
}

// calcValue wraps any token that could not be parsed into one of the nodes
// above (identifiers, unparsable numbers, operators) so it round-trips as-is.
// isInvalidPlusOrMinus marks a "+"/"-" token that lacks the surrounding
// whitespace the spec requires, so it must not be treated as an operator.
type calcValue struct {
	token                css_ast.Token
	isInvalidPlusOrMinus bool
}

// floatToStringForCalc formats a float for emission inside a reduced calc()
// expression. It returns ok == false when the value is non-finite or when the
// shortened decimal text does not parse back to exactly the same float64, in
// which case the caller must keep the original (unreduced) expression.
func floatToStringForCalc(a float64) (string, bool) {
	// Handle non-finite cases
	if math.IsNaN(a) || math.IsInf(a, 0) {
		return "", false
	}

	// Print the number as a string
	text := fmt.Sprintf("%.05f", a)
	// Strip trailing zeros, then a trailing decimal point ("1.50000" -> "1.5").
	for text[len(text)-1] == '0' {
		text = text[:len(text)-1]
	}
	if text[len(text)-1] == '.' {
		text = text[:len(text)-1]
	}
	// Drop the leading zero of a fraction ("0.5" -> ".5", "-0.5" -> "-.5").
	if strings.HasPrefix(text, "0.") {
		text = text[1:]
	} else if strings.HasPrefix(text, "-0.") {
		text = "-" + text[2:]
	}

	// Bail if the number is not exactly represented
	if number, err := strconv.ParseFloat(text, 64); err != nil || number != a {
		return "", false
	}
	return text, true
}

// convertToToken serializes a Sum node as a parenthesized token group. The
// top-level caller (tryToReduceCalcExpression) rewrites the outermost
// parenthesis back into a "calc(" function token.
func (c *calcSum) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
	tokens := make([]css_ast.Token, 0, len(c.terms)*2)

	// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
	if product, ok := c.terms[0].(*calcProduct); ok {
		token, ok := product.convertToToken(whitespace)
		if !ok {
			return css_ast.Token{}, false
		}
		tokens = append(tokens, *token.Children...)
	} else {
		token, ok := c.terms[0].convertToToken(whitespace)
		if !ok {
			return css_ast.Token{}, false
		}
		tokens = append(tokens, token)
	}

	for _, term := range c.terms[1:] {
		// If child is a Negate node, append " - " to s, then serialize the Negate’s child and append the result to s.
		if negate, ok := term.(*calcNegate); ok {
			token, ok := negate.term.convertToToken(whitespace)
			if !ok {
				return css_ast.Token{}, false
			}
			tokens = append(tokens, css_ast.Token{
				Kind:       css_lexer.TDelimMinus,
				Text:       "-",
				Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
			}, token)
			continue
		}

		// If child is a negative numeric value, append " - " to s, then serialize the negation of child as normal and append the result to s.
		if numeric, ok := term.(*calcNumeric); ok && numeric.number < 0 {
			// Copy before negating so the AST node itself is left untouched.
			clone := *numeric
			clone.number = -clone.number
			token, ok := clone.convertToToken(whitespace)
			if !ok {
				return css_ast.Token{}, false
			}
			tokens = append(tokens, css_ast.Token{
				Kind:       css_lexer.TDelimMinus,
				Text:       "-",
				Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
			}, token)
			continue
		}

		// Otherwise, append " + " to s, then serialize child and append the result to s.
		tokens = append(tokens, css_ast.Token{
			Kind:       css_lexer.TDelimPlus,
			Text:       "+",
			Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter,
		})

		// ALGORITHM DEVIATION: Avoid parenthesizing product nodes inside sum nodes
		if product, ok := term.(*calcProduct); ok {
			token, ok := product.convertToToken(whitespace)
			if !ok {
				return css_ast.Token{}, false
			}
			tokens = append(tokens, *token.Children...)
		} else {
			token, ok := term.convertToToken(whitespace)
			if !ok {
				return css_ast.Token{}, false
			}
			tokens = append(tokens, token)
		}
	}

	return css_ast.Token{
		Kind:     css_lexer.TOpenParen,
		Text:     "(",
		Children: &tokens,
	}, true
}

// convertToToken serializes a Product node as a parenthesized token group,
// emitting "/" for children wrapped in an Invert node and "*" otherwise.
func (c *calcProduct) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-serialize
	tokens := make([]css_ast.Token, 0, len(c.terms)*2)
	token, ok := c.terms[0].convertToToken(whitespace)
	if !ok {
		return css_ast.Token{}, false
	}
	tokens = append(tokens, token)

	for _, term := range c.terms[1:] {
		// If child is an Invert node, append " / " to s, then serialize the Invert’s child and append the result to s.
if invert, ok := term.(*calcInvert); ok { token, ok := invert.term.convertToToken(whitespace) if !ok { return css_ast.Token{}, false } tokens = append(tokens, css_ast.Token{ Kind: css_lexer.TDelimSlash, Text: "/", Whitespace: whitespace, }, token) continue } // Otherwise, append " * " to s, then serialize child and append the result to s. token, ok := term.convertToToken(whitespace) if !ok { return css_ast.Token{}, false } tokens = append(tokens, css_ast.Token{ Kind: css_lexer.TDelimAsterisk, Text: "*", Whitespace: whitespace, }, token) } return css_ast.Token{ Kind: css_lexer.TOpenParen, Text: "(", Children: &tokens, }, true } func (c *calcNegate) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) { // Specification: https://www.w3.org/TR/css-values-4/#calc-serialize token, ok := c.term.convertToToken(whitespace) if !ok { return css_ast.Token{}, false } return css_ast.Token{ Kind: css_lexer.TOpenParen, Text: "(", Children: &[]css_ast.Token{ {Kind: css_lexer.TNumber, Text: "-1"}, {Kind: css_lexer.TDelimSlash, Text: "*", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter}, token, }, }, true } func (c *calcInvert) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) { // Specification: https://www.w3.org/TR/css-values-4/#calc-serialize token, ok := c.term.convertToToken(whitespace) if !ok { return css_ast.Token{}, false } return css_ast.Token{ Kind: css_lexer.TOpenParen, Text: "(", Children: &[]css_ast.Token{ {Kind: css_lexer.TNumber, Text: "1"}, {Kind: css_lexer.TDelimSlash, Text: "/", Whitespace: css_ast.WhitespaceBefore | css_ast.WhitespaceAfter}, token, }, }, true } func (c *calcNumeric) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) { text, ok := floatToStringForCalc(c.number) if !ok { return css_ast.Token{}, false } if c.unit == "" { return css_ast.Token{ Kind: css_lexer.TNumber, Text: text, }, true } else if c.unit == "%" { return css_ast.Token{ Kind: css_lexer.TPercentage, Text: 
text + "%",
		}, true
	} else {
		return css_ast.Token{
			Kind:       css_lexer.TDimension,
			Text:       text + c.unit,
			UnitOffset: uint16(len(text)),
		}, true
	}
}

// convertToToken returns the wrapped token verbatim, with its whitespace
// flags cleared (the sum/product serializers re-insert operator spacing).
func (c *calcValue) convertToToken(whitespace css_ast.WhitespaceFlags) (css_ast.Token, bool) {
	t := c.token
	t.Whitespace = 0
	return t, true
}

// partiallySimplify flattens nested Sum nodes and folds together numeric
// terms that share the same unit.
func (c *calcSum) partiallySimplify() calcTerm {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification

	// For each of root’s children that are Sum nodes, replace them with their children.
	terms := make([]calcTerm, 0, len(c.terms))
	for _, term := range c.terms {
		term = term.partiallySimplify()
		if sum, ok := term.(*calcSum); ok {
			terms = append(terms, sum.terms...)
		} else {
			terms = append(terms, term)
		}
	}

	// For each set of root’s children that are numeric values with identical units, remove
	// those children and replace them with a single numeric value containing the sum of the
	// removed nodes, and with the same unit. (E.g. combine numbers, combine percentages,
	// combine px values, etc.)
	for i := 0; i < len(terms); i++ {
		term := terms[i]
		if numeric, ok := term.(*calcNumeric); ok {
			// In-place compaction: accumulate matching units into "numeric"
			// and shift every non-matching term down over the removed slots.
			end := i + 1
			for j := end; j < len(terms); j++ {
				term2 := terms[j]
				if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == numeric.unit {
					numeric.number += numeric2.number
				} else {
					terms[end] = term2
					end++
				}
			}
			terms = terms[:end]
		}
	}

	// If root has only a single child at this point, return the child.
	if len(terms) == 1 {
		return terms[0]
	}

	// Otherwise, return root.
	c.terms = terms
	return c
}

// partiallySimplify flattens nested Product nodes, folds unitless number
// factors together, and prefers division over multiplication when the
// reciprocal prints shorter.
func (c *calcProduct) partiallySimplify() calcTerm {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification

	// For each of root’s children that are Product nodes, replace them with their children.
	terms := make([]calcTerm, 0, len(c.terms))
	for _, term := range c.terms {
		term = term.partiallySimplify()
		if product, ok := term.(*calcProduct); ok {
			terms = append(terms, product.terms...)
		} else {
			terms = append(terms, term)
		}
	}

	// If root has multiple children that are numbers (not percentages or dimensions), remove
	// them and replace them with a single number containing the product of the removed nodes.
	for i, term := range terms {
		if numeric, ok := term.(*calcNumeric); ok && numeric.unit == "" {
			end := i + 1
			for j := end; j < len(terms); j++ {
				term2 := terms[j]
				if numeric2, ok := term2.(*calcNumeric); ok && numeric2.unit == "" {
					numeric.number *= numeric2.number
				} else {
					terms[end] = term2
					end++
				}
			}
			terms = terms[:end]
			break
		}
	}

	// If root contains only numeric values and/or Invert nodes containing numeric values,
	// and multiplying the types of all the children (noting that the type of an Invert
	// node is the inverse of its child’s type) results in a type that matches any of the
	// types that a math function can resolve to, return the result of multiplying all the
	// values of the children (noting that the value of an Invert node is the reciprocal
	// of its child’s value), expressed in the result’s canonical unit.
	if len(terms) == 2 {
		// Right now, only handle the case of two numbers, one of which has no unit
		if first, ok := terms[0].(*calcNumeric); ok {
			if second, ok := terms[1].(*calcNumeric); ok {
				if first.unit == "" {
					second.number *= first.number
					return second
				}
				if second.unit == "" {
					first.number *= second.number
					return first
				}
			}
		}
	}

	// ALGORITHM DEVIATION: Divide instead of multiply if the reciprocal is shorter
	for i := 1; i < len(terms); i++ {
		if numeric, ok := terms[i].(*calcNumeric); ok {
			reciprocal := 1 / numeric.number
			if multiply, ok := floatToStringForCalc(numeric.number); ok {
				if divide, ok := floatToStringForCalc(reciprocal); ok && len(divide) < len(multiply) {
					numeric.number = reciprocal
					terms[i] = &calcInvert{term: numeric}
				}
			}
		}
	}

	// If root has only a single child at this point, return the child.
	if len(terms) == 1 {
		return terms[0]
	}

	// Otherwise, return root.
	c.terms = terms
	return c
}

// partiallySimplify folds negation into a numeric child and cancels
// double negation.
func (c *calcNegate) partiallySimplify() calcTerm {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
	c.term = c.term.partiallySimplify()

	// If root’s child is a numeric value, return an equivalent numeric value, but with the value negated (0 - value).
	if numeric, ok := c.term.(*calcNumeric); ok {
		numeric.number = -numeric.number
		return numeric
	}

	// If root’s child is a Negate node, return the child’s child.
	if negate, ok := c.term.(*calcNegate); ok {
		return negate.term
	}

	return c
}

// partiallySimplify folds inversion into a unitless numeric child and
// cancels double inversion.
func (c *calcInvert) partiallySimplify() calcTerm {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-simplification
	c.term = c.term.partiallySimplify()

	// If root’s child is a number (not a percentage or dimension) return the reciprocal of the child’s value.
	if numeric, ok := c.term.(*calcNumeric); ok && numeric.unit == "" {
		numeric.number = 1 / numeric.number
		return numeric
	}

	// If root’s child is an Invert node, return the child’s child.
	if invert, ok := c.term.(*calcInvert); ok {
		return invert.term
	}

	return c
}

// Numeric literals are already leaf nodes; nothing to simplify.
func (c *calcNumeric) partiallySimplify() calcTerm {
	return c
}

// Opaque wrapped tokens cannot be simplified further.
func (c *calcValue) partiallySimplify() calcTerm {
	return c
}

// tryToParseCalcTerm parses a flat token slice into a calc AST term. It
// returns nil when the expression must be left untouched — notably when it
// contains "var()", which may expand to an arbitrary number of tokens.
func tryToParseCalcTerm(tokens []css_ast.Token) calcTerm {
	// Specification: https://www.w3.org/TR/css-values-4/#calc-internal
	terms := make([]calcTerm, len(tokens))
	for i, token := range tokens {
		var term calcTerm
		if token.Kind == css_lexer.TFunction && token.Text == "var" {
			// Using "var()" should bail because it can expand to any number of tokens
			return nil
		} else if token.Kind == css_lexer.TOpenParen || (token.Kind == css_lexer.TFunction && token.Text == "calc") {
			term = tryToParseCalcTerm(*token.Children)
			if term == nil {
				return nil
			}
		} else if token.Kind == css_lexer.TNumber {
			if number, err := strconv.ParseFloat(token.Text, 64); err == nil {
				term = &calcNumeric{number: number}
			} else {
				term = &calcValue{token: token}
			}
		} else if token.Kind == css_lexer.TPercentage {
			if number, err :=
strconv.ParseFloat(token.PercentageValue(), 64); err == nil { term = &calcNumeric{number: number, unit: "%"} } else { term = &calcValue{token: token} } } else if token.Kind == css_lexer.TDimension { if number, err := strconv.ParseFloat(token.DimensionValue(), 64); err == nil { term = &calcNumeric{number: number, unit: token.DimensionUnit()} } else { term = &calcValue{token: token} } } else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "Infinity") { term = &calcNumeric{number: math.Inf(1)} } else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "-Infinity") { term = &calcNumeric{number: math.Inf(-1)} } else if token.Kind == css_lexer.TIdent && strings.EqualFold(token.Text, "NaN") { term = &calcNumeric{number: math.NaN()} } else { term = &calcValue{ token: token, // From the specification: "In addition, whitespace is required on both sides of the // + and - operators. (The * and / operators can be used without white space around them.)" isInvalidPlusOrMinus: i > 0 && i+1 < len(tokens) && (token.Kind == css_lexer.TDelimPlus || token.Kind == css_lexer.TDelimMinus) && (((token.Whitespace&css_ast.WhitespaceBefore) == 0 && (tokens[i-1].Whitespace&css_ast.WhitespaceAfter) == 0) || (token.Whitespace&css_ast.WhitespaceAfter) == 0 && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) == 0), } } terms[i] = term } // Collect children into Product and Invert nodes first := 1 for first+1 < len(terms) { // If this is a "*" or "/" operator if value, ok := terms[first].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) { // Scan over the run last := first for last+3 < len(terms) { if value, ok := terms[last+2].(*calcValue); ok && (value.token.Kind == css_lexer.TDelimAsterisk || value.token.Kind == css_lexer.TDelimSlash) { last += 2 } else { break } } // Generate a node for the run product := calcProduct{terms: make([]calcTerm, (last-first)/2+2)} for i := range product.terms { term := 
terms[first+i*2-1] if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimSlash { term = &calcInvert{term: term} } product.terms[i] = term } // Replace the run with a single node terms[first-1] = &product terms = append(terms[:first], terms[last+2:]...) continue } first++ } // Collect children into Sum and Negate nodes first = 1 for first+1 < len(terms) { // If this is a "+" or "-" operator if value, ok := terms[first].(*calcValue); ok && !value.isInvalidPlusOrMinus && (value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) { // Scan over the run last := first for last+3 < len(terms) { if value, ok := terms[last+2].(*calcValue); ok && !value.isInvalidPlusOrMinus && (value.token.Kind == css_lexer.TDelimPlus || value.token.Kind == css_lexer.TDelimMinus) { last += 2 } else { break } } // Generate a node for the run sum := calcSum{terms: make([]calcTerm, (last-first)/2+2)} for i := range sum.terms { term := terms[first+i*2-1] if i > 0 && terms[first+i*2-2].(*calcValue).token.Kind == css_lexer.TDelimMinus { term = &calcNegate{term: term} } sum.terms[i] = term } // Replace the run with a single node terms[first-1] = &sum terms = append(terms[:first], terms[last+2:]...) 
continue } first++ } // This only succeeds if everything reduces to a single term if len(terms) == 1 { return terms[0] } return nil } ================================================ FILE: lib/esbuild/css_printer/astro_features.go ================================================ package css_printer import ( "fmt" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" ) func (p *printer) printScopedSelector() bool { var str string if p.options.ScopeStrategy == ScopeStrategyWhere { str = fmt.Sprintf(":where(.astro-%s)", p.options.Scope) } else if p.options.ScopeStrategy == ScopeStrategyAttribute { str = fmt.Sprintf("[data-astro-cid-%s]", p.options.Scope) } else { str = fmt.Sprintf(".astro-%s", p.options.Scope) } p.print(str) return true } func (p *printer) printCompoundSelector(sel css_ast.CompoundSelector, isFirst bool, isLast bool, shouldScope bool) { scoped := false if !isFirst && sel.Combinator == "" { // A space is required in between compound selectors if there is no // combinator in the middle. It's fine to convert "a + b" into "a+b" // but not to convert "a b" into "ab". 
p.print(" ")
	}

	// A leading "&" counts as already scoped (the parent selector carries the scope).
	if sel.NestingSelector == css_ast.NestingSelectorPrefix {
		p.print("&")
		scoped = true
	}

	if sel.Combinator != "" {
		if !p.options.MinifyWhitespace {
			p.print(" ")
		}
		p.print(sel.Combinator)
		if !p.options.MinifyWhitespace {
			p.print(" ")
		}
	}

	if sel.TypeSelector != nil {
		whitespace := mayNeedWhitespaceAfter
		if len(sel.SubclassSelectors) > 0 {
			// There is no chance of whitespace before a subclass selector or pseudo
			// class selector
			whitespace = canDiscardWhitespaceAfter
		}
		if sel.TypeSelector.Name.Text == "*" {
			// The universal selector is replaced entirely by the scope selector
			if shouldScope {
				scoped = p.printScopedSelector()
			} else {
				p.printNamespacedName(*sel.TypeSelector, whitespace)
			}
		} else {
			p.printNamespacedName(*sel.TypeSelector, whitespace)
		}
		switch sel.TypeSelector.Name.Text {
		case "body", "html":
			// "body" and "html" are deliberately left unscoped
			scoped = true
		default:
			if !scoped && shouldScope {
				scoped = p.printScopedSelector()
			}
		}
	}

	// Lazily computed on the first pseudo-class: true when every subclass
	// selector is a pseudo-class (used to decide scope placement below).
	var onlyPseudoSubclassSelectors *bool
	for i, sub := range sel.SubclassSelectors {
		whitespace := mayNeedWhitespaceAfter

		// There is no chance of whitespace between subclass selectors
		if i+1 < len(sel.SubclassSelectors) {
			whitespace = canDiscardWhitespaceAfter
		}

		switch s := sub.(type) {
		case *css_ast.SSHash:
			p.print("#")

			// This deliberately does not use identHash. From the specification:
			// "In <id-selector>, the <hash-token>'s value must be an identifier."
			p.printIdent(s.Name, identNormal, whitespace)
			if !scoped && shouldScope {
				scoped = p.printScopedSelector()
			}

		case *css_ast.SSClass:
			p.print(".")
			p.printIdent(s.Name, identNormal, whitespace)
			if !scoped && shouldScope {
				scoped = p.printScopedSelector()
			}

		case *css_ast.SSAttribute:
			// The scope selector goes before the attribute selector
			if !scoped && shouldScope {
				scoped = p.printScopedSelector()
			}
			p.print("[")
			p.printNamespacedName(s.NamespacedName, canDiscardWhitespaceAfter)
			if s.MatcherOp != "" {
				p.print(s.MatcherOp)
				printAsIdent := false

				// Print the value as an identifier if it's possible
				if css_lexer.WouldStartIdentifierWithoutEscapes(s.MatcherValue) {
					printAsIdent = true
					for _, c := range s.MatcherValue {
						if !css_lexer.IsNameContinue(c) {
							printAsIdent = false
							break
						}
					}
				}

				if printAsIdent {
					p.printIdent(s.MatcherValue, identNormal, canDiscardWhitespaceAfter)
				} else {
					p.printQuoted(s.MatcherValue)
				}
			}
			if s.MatcherModifier != 0 {
				// NOTE(review): MatcherModifier appears to hold a single code point
				// (e.g. the "i"/"s" flag) printed verbatim — confirm against css_ast.
				p.print(" ")
				p.print(string(rune(s.MatcherModifier)))
			}
			p.print("]")

		case *css_ast.SSPseudoClass:
			if sel.TypeSelector == nil && onlyPseudoSubclassSelectors == nil {
				onlyPseudoSubclassSelectors = new(bool)
				*onlyPseudoSubclassSelectors = true
				for _, ss := range sel.SubclassSelectors {
					_, ok := ss.(*css_ast.SSPseudoClass)
					if !ok {
						*onlyPseudoSubclassSelectors = false
						break
					}
				}
			}
			// If there is no type selector and all subclass selectors are pseudo
			// selectors, we need to add the scope before the first pseudo selector.
			if !scoped && shouldScope && sel.TypeSelector == nil && *onlyPseudoSubclassSelectors && i == 0 && s.Name != "global" && s.Name != "root" {
				scoped = p.printScopedSelector()
			}
			p.printPseudoClassSelector(*s, whitespace)
			// :global(...) and :root opt out of scoping entirely
			if s.Name == "global" || s.Name == "root" {
				scoped = true
			}
		}
	}
	// Fallback: append the scope selector if nothing above emitted it
	if !scoped && shouldScope {
		p.printScopedSelector()
	}

	// It doesn't matter where the "&" goes since all non-prefix cases are
	// treated the same. This just always puts it as a suffix for simplicity.
if sel.NestingSelector == css_ast.NestingSelectorPresentButNotPrefix { p.print("&") } } func (p *printer) printPseudoClassSelector(pseudo css_ast.SSPseudoClass, whitespace trailingWhitespace) { if pseudo.Name == "global" { if len(pseudo.Args) > 0 { p.printTokens(pseudo.Args, printTokensOpts{}) } else { p.printIdent(pseudo.Name, identNormal, whitespace) } } else { if pseudo.IsElement { p.print("::") } else { p.print(":") } if len(pseudo.Args) > 0 { p.printIdent(pseudo.Name, identNormal, canDiscardWhitespaceAfter) p.print("(") p.printTokens(pseudo.Args, printTokensOpts{}) p.print(")") } else { p.printIdent(pseudo.Name, identNormal, whitespace) } } } ================================================ FILE: lib/esbuild/css_printer/css_printer.go ================================================ package css_printer import ( "fmt" "strings" "unicode/utf8" "github.com/withastro/compiler/lib/esbuild/ast" "github.com/withastro/compiler/lib/esbuild/config" "github.com/withastro/compiler/lib/esbuild/css_ast" "github.com/withastro/compiler/lib/esbuild/css_lexer" "github.com/withastro/compiler/lib/esbuild/helpers" "github.com/withastro/compiler/lib/esbuild/sourcemap" ) const quoteForURL byte = 0 type printer struct { options Options importRecords []ast.ImportRecord css []byte extractedLegalComments map[string]bool builder sourcemap.ChunkBuilder selectorRuleDepth int } type ScopeStrategy uint8 const ( ScopeStrategyWhere ScopeStrategy = iota ScopeStrategyClass ScopeStrategyAttribute ScopeStrategy = iota ) type Options struct { // This will be present if the input file had a source map. In that case we // want to map all the way back to the original input file(s). 
InputSourceMap *sourcemap.SourceMap // If we're writing out a source map, this table of line start indices lets // us do binary search on to figure out what line a given AST node came from LineOffsetTables []sourcemap.LineOffsetTable MinifyWhitespace bool ASCIIOnly bool AddSourceMappings bool LegalComments config.LegalComments Scope string ScopeStrategy ScopeStrategy } type PrintResult struct { CSS []byte ExtractedLegalComments map[string]bool SourceMapChunk sourcemap.Chunk } func Print(tree css_ast.AST, options Options) PrintResult { p := printer{ options: options, importRecords: tree.ImportRecords, builder: sourcemap.MakeChunkBuilder(options.InputSourceMap, options.LineOffsetTables), } for _, rule := range tree.Rules { p.printRule(rule, 0, false) } return PrintResult{ CSS: p.css, ExtractedLegalComments: p.extractedLegalComments, SourceMapChunk: p.builder.GenerateChunk(p.css), } } func (p *printer) printRule(rule css_ast.Rule, indent int32, omitTrailingSemicolon bool) { if r, ok := rule.Data.(*css_ast.RComment); ok { switch p.options.LegalComments { case config.LegalCommentsNone: return case config.LegalCommentsEndOfFile, config.LegalCommentsLinkedWithComment, config.LegalCommentsExternalWithoutComment: if p.extractedLegalComments == nil { p.extractedLegalComments = make(map[string]bool) } p.extractedLegalComments[r.Text] = true return } } if p.options.AddSourceMappings { p.builder.AddSourceMapping(rule.Loc, p.css) } if !p.options.MinifyWhitespace { p.printIndent(indent) } switch r := rule.Data.(type) { case *css_ast.RAtCharset: // It's not valid to remove the space in between these two tokens p.print("@charset ") // It's not valid to print the string with single quotes p.printQuotedWithQuote(r.Encoding, '"') p.print(";") case *css_ast.RAtImport: if p.options.MinifyWhitespace { p.print("@import") } else { p.print("@import ") } p.printQuoted(p.importRecords[r.ImportRecordIndex].Path.Text) p.printTokens(r.ImportConditions, printTokensOpts{}) p.print(";") case 
*css_ast.RAtKeyframes:
		p.print("@")
		p.printIdent(r.AtToken, identNormal, mayNeedWhitespaceAfter)
		p.print(" ")
		if r.Name == "" {
			// An empty keyframes name must still be printed (as an empty string)
			p.print("\"\"")
		} else {
			p.printIdent(r.Name, identNormal, canDiscardWhitespaceAfter)
		}
		if !p.options.MinifyWhitespace {
			p.print(" ")
		}
		if p.options.MinifyWhitespace {
			p.print("{")
		} else {
			p.print("{\n")
		}
		indent++
		for _, block := range r.Blocks {
			if !p.options.MinifyWhitespace {
				p.printIndent(indent)
			}
			for i, sel := range block.Selectors {
				if i > 0 {
					if p.options.MinifyWhitespace {
						p.print(",")
					} else {
						p.print(", ")
					}
				}
				p.print(sel)
			}
			if !p.options.MinifyWhitespace {
				p.print(" ")
			}
			p.printRuleBlock(block.Rules, indent)
			if !p.options.MinifyWhitespace {
				p.print("\n")
			}
		}
		indent--
		if !p.options.MinifyWhitespace {
			p.printIndent(indent)
		}
		p.print("}")

	case *css_ast.RKnownAt:
		p.print("@")
		whitespace := mayNeedWhitespaceAfter
		if len(r.Prelude) == 0 {
			whitespace = canDiscardWhitespaceAfter
		}
		p.printIdent(r.AtToken, identNormal, whitespace)
		if (!p.options.MinifyWhitespace && r.Rules != nil) || len(r.Prelude) > 0 {
			p.print(" ")
		}
		p.printTokens(r.Prelude, printTokensOpts{})
		if r.Rules == nil {
			// A known at-rule without a block ends in a semicolon
			p.print(";")
		} else {
			if !p.options.MinifyWhitespace && len(r.Prelude) > 0 {
				p.print(" ")
			}
			p.printRuleBlock(r.Rules, indent)
		}

	case *css_ast.RUnknownAt:
		p.print("@")
		whitespace := mayNeedWhitespaceAfter
		if len(r.Prelude) == 0 {
			whitespace = canDiscardWhitespaceAfter
		}
		p.printIdent(r.AtToken, identNormal, whitespace)
		if (!p.options.MinifyWhitespace && r.Block != nil) || len(r.Prelude) > 0 {
			p.print(" ")
		}
		p.printTokens(r.Prelude, printTokensOpts{})
		if !p.options.MinifyWhitespace && r.Block != nil && len(r.Prelude) > 0 {
			p.print(" ")
		}
		if r.Block == nil {
			p.print(";")
		} else {
			// Unknown at-rule bodies are preserved as raw tokens
			p.printTokens(r.Block, printTokensOpts{})
		}

	case *css_ast.RSelector:
		if r.HasAtNest {
			p.print("@nest")
		}
		p.printComplexSelectors(r.Selectors, indent, r.HasAtNest)
		if !p.options.MinifyWhitespace {
			p.print(" ")
		}
		// Track nesting depth so scoping can be suppressed inside nested rules
		p.selectorRuleDepth++
		p.printRuleBlock(r.Rules, indent)
		p.selectorRuleDepth--

	case *css_ast.RQualified:
		hasWhitespaceAfter := p.printTokens(r.Prelude, printTokensOpts{})
		if !hasWhitespaceAfter && !p.options.MinifyWhitespace {
			p.print(" ")
		}
		p.printRuleBlock(r.Rules, indent)

	case *css_ast.RDeclaration:
		p.printIdent(r.KeyText, identNormal, canDiscardWhitespaceAfter)
		p.print(":")
		hasWhitespaceAfter := p.printTokens(r.Value, printTokensOpts{
			indent:        indent,
			isDeclaration: true,
		})
		if r.Important {
			if !hasWhitespaceAfter && !p.options.MinifyWhitespace && len(r.Value) > 0 {
				p.print(" ")
			}
			p.print("!important")
		}
		if !omitTrailingSemicolon {
			p.print(";")
		}

	case *css_ast.RBadDeclaration:
		p.printTokens(r.Tokens, printTokensOpts{})
		if !omitTrailingSemicolon {
			p.print(";")
		}

	case *css_ast.RComment:
		p.printIndentedComment(indent, r.Text)

	case *css_ast.RAtLayer:
		p.print("@layer")
		for i, parts := range r.Names {
			if i == 0 {
				p.print(" ")
			} else if !p.options.MinifyWhitespace {
				p.print(", ")
			} else {
				p.print(",")
			}
			p.print(strings.Join(parts, "."))
		}
		if r.Rules == nil {
			p.print(";")
		} else {
			if !p.options.MinifyWhitespace {
				p.print(" ")
			}
			p.printRuleBlock(r.Rules, indent)
		}

	default:
		panic("Internal error")
	}

	if !p.options.MinifyWhitespace {
		p.print("\n")
	}
}

// printIndentedComment prints a (possibly multi-line) comment, re-indenting
// each continuation line and escaping any "</style" sequence.
func (p *printer) printIndentedComment(indent int32, text string) {
	// Avoid generating a comment containing the character sequence "</style"
	text = helpers.EscapeClosingTag(text, "/style")

	// Re-indent multi-line comments
	for {
		newline := strings.IndexByte(text, '\n')
		if newline == -1 {
			break
		}
		p.print(text[:newline+1])
		if !p.options.MinifyWhitespace {
			p.printIndent(indent)
		}
		text = text[newline+1:]
	}
	p.print(text)
}

// printRuleBlock prints "{ ... }" around a list of rules, omitting the final
// semicolon of the last rule when minifying.
func (p *printer) printRuleBlock(rules []css_ast.Rule, indent int32) {
	if p.options.MinifyWhitespace {
		p.print("{")
	} else {
		p.print("{\n")
	}

	for i, decl := range rules {
		omitTrailingSemicolon := p.options.MinifyWhitespace && i+1 == len(rules)
		p.printRule(decl, indent+1, omitTrailingSemicolon)
	}

	if !p.options.MinifyWhitespace {
		p.printIndent(indent)
	}
	p.print("}")
}
// printComplexSelectors prints a comma-separated selector list. Inside a
// nested selector rule (selectorRuleDepth > 0), Astro scoping is applied only
// when the complex selector contains a "&" somewhere; otherwise the parent
// selector already carries the scope.
func (p *printer) printComplexSelectors(selectors []css_ast.ComplexSelector, indent int32, hasAtNest bool) {
	for i, complex := range selectors {
		if i > 0 {
			if p.options.MinifyWhitespace {
				p.print(",")
			} else {
				p.print(",\n")
				p.printIndent(indent)
			}
		}
		for j, compound := range complex.Selectors {
			shouldScope := true
			if p.selectorRuleDepth > 0 {
				hasNestingSelector := false
				for _, part := range complex.Selectors {
					if part.NestingSelector != css_ast.NestingSelectorNone {
						hasNestingSelector = true
						break
					}
				}
				if !hasNestingSelector {
					shouldScope = false
				}
			}
			p.printCompoundSelector(compound, (!hasAtNest || i != 0) && j == 0, j+1 == len(complex.Selectors), shouldScope)
		}
	}
}

// printNamespacedName prints an optional "ns|" prefix followed by an
// identifier, "*", or "&".
func (p *printer) printNamespacedName(nsName css_ast.NamespacedName, whitespace trailingWhitespace) {
	if nsName.NamespacePrefix != nil {
		switch nsName.NamespacePrefix.Kind {
		case css_lexer.TIdent:
			p.printIdent(nsName.NamespacePrefix.Text, identNormal, canDiscardWhitespaceAfter)
		case css_lexer.TDelimAsterisk:
			p.print("*")
		default:
			panic("Internal error")
		}
		p.print("|")
	}

	switch nsName.Name.Kind {
	case css_lexer.TIdent:
		p.printIdent(nsName.Name.Text, identNormal, whitespace)
	case css_lexer.TDelimAsterisk:
		p.print("*")
	case css_lexer.TDelimAmpersand:
		p.print("&")
	default:
		panic("Internal error")
	}
}

// print appends raw text to the output buffer.
func (p *printer) print(text string) {
	p.css = append(p.css, text...)
}

// bestQuoteCharForString picks the quote character producing the shortest
// output: no quote at all (URL tokens only), double quote on ties, otherwise
// whichever of '/" needs fewer escapes.
func bestQuoteCharForString(text string, forURL bool) byte {
	forURLCost := 0
	singleCost := 2
	doubleCost := 2

	for _, c := range text {
		switch c {
		case '\'':
			forURLCost++
			singleCost++

		case '"':
			forURLCost++
			doubleCost++

		case '(', ')', ' ', '\t':
			forURLCost++

		case '\\', '\n', '\r', '\f':
			forURLCost++
			singleCost++
			doubleCost++
		}
	}

	// Quotes can sometimes be omitted for URL tokens
	if forURL && forURLCost < singleCost && forURLCost < doubleCost {
		return quoteForURL
	}

	// Prefer double quotes to single quotes if there is no cost difference
	if singleCost < doubleCost {
		return '\''
	}
	return '"'
}

// printQuoted prints text as a quoted string using the cheapest quote char.
func (p *printer) printQuoted(text string) {
	p.printQuotedWithQuote(text, bestQuoteCharForString(text, false))
}

// escapeKind describes how a single character must be escaped in output.
type escapeKind uint8

const (
	escapeNone escapeKind = iota
	escapeBackslash
	escapeHex
)

// printWithEscape prints one rune with the requested escape. remainingText
// (starting at c) is consulted after a hex escape to decide whether a
// trailing space is needed so the next character isn't absorbed into the
// escape sequence.
func (p *printer) printWithEscape(c rune, escape escapeKind, remainingText string, mayNeedWhitespaceAfter bool) {
	var temp [utf8.UTFMax]byte

	if escape == escapeBackslash && ((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) {
		// Hexadecimal characters cannot use a plain backslash escape
		escape = escapeHex
	}

	switch escape {
	case escapeNone:
		width := utf8.EncodeRune(temp[:], c)
		p.css = append(p.css, temp[:width]...)

	case escapeBackslash:
		p.css = append(p.css, '\\')
		width := utf8.EncodeRune(temp[:], c)
		p.css = append(p.css, temp[:width]...)

	case escapeHex:
		text := fmt.Sprintf("\\%x", c)
		p.css = append(p.css, text...)

		// Make sure the next character is not interpreted as part of the escape sequence
		if len(text) < 1+6 {
			if next := utf8.RuneLen(c); next < len(remainingText) {
				c = rune(remainingText[next])
				if c == ' ' || c == '\t' || (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') {
					p.css = append(p.css, ' ')
				}
			} else if mayNeedWhitespaceAfter {
				// If the last character is a hexadecimal escape, print a space afterwards
				// for the escape sequence to consume.
// That way we're sure it won't
				// accidentally consume a semantically significant space afterward.
				p.css = append(p.css, ' ')
			}
		}
	}
}

// printQuotedWithQuote prints text surrounded by the given quote byte
// (quoteForURL means no quotes, as inside an unquoted url() token), escaping
// characters as required for that context.
func (p *printer) printQuotedWithQuote(text string, quote byte) {
	if quote != quoteForURL {
		p.css = append(p.css, quote)
	}

	for i, c := range text {
		escape := escapeNone

		switch c {
		case '\x00', '\r', '\n', '\f':
			// Use a hexadecimal escape for characters that would be invalid escapes
			escape = escapeHex

		case '\\', rune(quote):
			escape = escapeBackslash

		case '(', ')', ' ', '\t', '"', '\'':
			// These characters must be escaped in URL tokens
			if quote == quoteForURL {
				escape = escapeBackslash
			}

		case '/':
			// Avoid generating the sequence "</style" in CSS code
			if i >= 1 && text[i-1] == '<' && i+6 <= len(text) && strings.EqualFold(text[i+1:i+6], "style") {
				escape = escapeBackslash
			}

		default:
			if (p.options.ASCIIOnly && c >= 0x80) || c == '\uFEFF' {
				escape = escapeHex
			}
		}

		p.printWithEscape(c, escape, text[i:], false)
	}

	if quote != quoteForURL {
		p.css = append(p.css, quote)
	}
}

// identMode selects the escaping rules for printIdent.
type identMode uint8

const (
	identNormal identMode = iota
	identHash
	identDimensionUnit
)

// trailingWhitespace tells printIdent whether a semantically significant
// character may follow, which affects trailing hex-escape handling.
type trailingWhitespace uint8

const (
	mayNeedWhitespaceAfter trailingWhitespace = iota
	canDiscardWhitespaceAfter
)

// printIdent prints a CSS identifier, escaping characters as needed for the
// given mode (normal identifier, hash token, or dimension unit).
func (p *printer) printIdent(text string, mode identMode, whitespace trailingWhitespace) {
	for i, c := range text {
		escape := escapeNone

		if p.options.ASCIIOnly && c >= 0x80 {
			escape = escapeHex
		} else if c == '\r' || c == '\n' || c == '\f' || c == '\uFEFF' {
			// Use a hexadecimal escape for characters that would be invalid escapes
			escape = escapeHex
		} else {
			// Escape non-identifier characters
			if !css_lexer.IsNameContinue(c) {
				escape = escapeBackslash
			}

			// Special escape behavior for the first character
			if i == 0 {
				switch mode {
				case identNormal:
					if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
						escape = escapeBackslash
					}

				case identDimensionUnit:
					if !css_lexer.WouldStartIdentifierWithoutEscapes(text) {
						escape = escapeBackslash
					} else if c >= '0' && c <= '9' {
						// Unit: "2x"
						escape = escapeHex
					} else if c == 'e' || c == 'E' {
						if len(text) >= 2 && text[1] >= '0' && text[1] <= '9' {
							// Unit: "e2x"
							escape = escapeBackslash
						} else if len(text) >= 3 && text[1] == '-' && text[2] >= '0' && text[2] <= '9' {
							// Unit: "e-2x"
							escape = escapeBackslash
						}
					}
				}
			}
		}

		// If the last character is a hexadecimal escape, print a space afterwards
		// for the escape sequence to consume. That way we're sure it won't
		// accidentally consume a semantically significant space afterward.
		mayNeedWhitespaceAfter := whitespace == mayNeedWhitespaceAfter && escape != escapeNone && i+utf8.RuneLen(c) == len(text)
		p.printWithEscape(c, escape, text[i:], mayNeedWhitespaceAfter)
	}
}

// printIndent prints two spaces per indent level.
func (p *printer) printIndent(indent int32) {
	for i, n := 0, int(indent); i < n; i++ {
		p.css = append(p.css, "  "...)
	}
}

type printTokensOpts struct {
	indent        int32
	isDeclaration bool
}

// printTokens prints a token list, normalizing inter-token whitespace and
// pretty-printing declarations with 3+ comma-separated items one per line.
// It returns whether the printed run ended with trailing whitespace.
func (p *printer) printTokens(tokens []css_ast.Token, opts printTokensOpts) bool {
	hasWhitespaceAfter := len(tokens) > 0 && (tokens[0].Whitespace&css_ast.WhitespaceBefore) != 0

	// Pretty-print long comma-separated declarations of 3 or more items
	isMultiLineValue := false
	if !p.options.MinifyWhitespace && opts.isDeclaration {
		commaCount := 0
		for _, t := range tokens {
			if t.Kind == css_lexer.TComma {
				commaCount++
			}
		}
		isMultiLineValue = commaCount >= 2
	}

	for i, t := range tokens {
		if t.Kind == css_lexer.TWhitespace {
			hasWhitespaceAfter = true
			continue
		}
		if hasWhitespaceAfter {
			if isMultiLineValue && (i == 0 || tokens[i-1].Kind == css_lexer.TComma) {
				p.print("\n")
				p.printIndent(opts.indent + 1)
			} else {
				p.print(" ")
			}
		}
		hasWhitespaceAfter = (t.Whitespace&css_ast.WhitespaceAfter) != 0 ||
			(i+1 < len(tokens) && (tokens[i+1].Whitespace&css_ast.WhitespaceBefore) != 0)

		whitespace := mayNeedWhitespaceAfter
		if !hasWhitespaceAfter {
			whitespace = canDiscardWhitespaceAfter
		}

		switch t.Kind {
		case css_lexer.TIdent:
			p.printIdent(t.Text, identNormal, whitespace)

		case css_lexer.TFunction:
			p.printIdent(t.Text, identNormal, whitespace)
			p.print("(")

		case css_lexer.TDimension:
			p.print(t.DimensionValue())
			p.printIdent(t.DimensionUnit(), identDimensionUnit, whitespace)

		case css_lexer.TAtKeyword:
			p.print("@")
			p.printIdent(t.Text, identNormal, whitespace)

		case css_lexer.THash:
			p.print("#")
			p.printIdent(t.Text, identHash, whitespace)

		case css_lexer.TString:
			p.printQuoted(t.Text)

		case css_lexer.TURL:
			text := p.importRecords[t.ImportRecordIndex].Path.Text
			p.print("url(")
			p.printQuotedWithQuote(text, bestQuoteCharForString(text, true))
			p.print(")")

		default:
			p.print(t.Text)
		}

		if t.Children != nil {
			p.printTokens(*t.Children, printTokensOpts{})

			// Print the matching closing delimiter for grouping tokens
			switch t.Kind {
			case css_lexer.TFunction:
				p.print(")")
			case css_lexer.TOpenParen:
				p.print(")")
			case css_lexer.TOpenBrace:
				p.print("}")
			case css_lexer.TOpenBracket:
				p.print("]")
			}
		}
	}
	if hasWhitespaceAfter {
		p.print(" ")
	}
	return hasWhitespaceAfter
}

================================================
FILE: lib/esbuild/css_printer/css_printer_test.go
================================================
package css_printer

import (
	"testing"

	"github.com/withastro/compiler/lib/esbuild/css_parser"
	"github.com/withastro/compiler/lib/esbuild/logger"
	"github.com/withastro/compiler/lib/esbuild/test"
)

// assertEqual fails the test when a != b.
func assertEqual(t *testing.T, a interface{}, b interface{}) {
	t.Helper()
	if a != b {
		t.Fatalf("%s != %s", a, b)
	}
}

// expectPrintedCommon parses contents, asserts there were no parse errors,
// prints the tree with the given options, and compares against expected.
func expectPrintedCommon(t *testing.T, name string, contents string, expected string, options Options) {
	t.Helper()
	t.Run(name, func(t *testing.T) {
		t.Helper()
		log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
		tree := css_parser.Parse(log, test.SourceForTest(contents), css_parser.Options{
			MinifyWhitespace: options.MinifyWhitespace,
		})
		msgs := log.Done()
		text := ""
		for _, msg := range msgs {
			if msg.Kind == logger.Error {
				text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{})
			}
		}
		assertEqual(t, text, "")
		result := Print(tree, options)
		assertEqual(t, string(result.CSS), expected)
	})
}

func expectPrinted(t *testing.T, contents string, expected
string) { t.Helper() expectPrintedCommon(t, contents, contents, expected, Options{}) } func expectPrintedMinify(t *testing.T, contents string, expected string) { t.Helper() expectPrintedCommon(t, contents+" [minified]", contents, expected, Options{ MinifyWhitespace: true, }) } func expectPrintedASCII(t *testing.T, contents string, expected string) { t.Helper() expectPrintedCommon(t, contents+" [ascii]", contents, expected, Options{ ASCIIOnly: true, }) } func expectPrintedString(t *testing.T, stringValue string, expected string) { t.Helper() t.Run(stringValue, func(t *testing.T) { t.Helper() p := printer{} p.printQuoted(stringValue) assertEqual(t, string(p.css), expected) }) } func TestStringQuote(t *testing.T) { expectPrintedString(t, "", "\"\"") expectPrintedString(t, "foo", "\"foo\"") expectPrintedString(t, "f\"o", "'f\"o'") expectPrintedString(t, "f'\"'o", "\"f'\\\"'o\"") expectPrintedString(t, "f\"'\"o", "'f\"\\'\"o'") expectPrintedString(t, "f\\o", "\"f\\\\o\"") expectPrintedString(t, "f\ro", "\"f\\do\"") expectPrintedString(t, "f\no", "\"f\\ao\"") expectPrintedString(t, "f\fo", "\"f\\co\"") expectPrintedString(t, "f\r\no", "\"f\\d\\ao\"") expectPrintedString(t, "f\r0", "\"f\\d 0\"") expectPrintedString(t, "f\n0", "\"f\\a 0\"") expectPrintedString(t, "f\n ", "\"f\\a \"") expectPrintedString(t, "f\n\t", "\"f\\a \t\"") expectPrintedString(t, "f\nf", "\"f\\a f\"") expectPrintedString(t, "f\nF", "\"f\\a F\"") expectPrintedString(t, "f\ng", "\"f\\ag\"") expectPrintedString(t, "f\nG", "\"f\\aG\"") expectPrintedString(t, "f\x01o", "\"f\x01o\"") expectPrintedString(t, "f\to", "\"f\to\"") expectPrintedString(t, "</script>", "\"</script>\"") expectPrintedString(t, "</style>", "\"<\\/style>\"") expectPrintedString(t, "</style", "\"<\\/style\"") expectPrintedString(t, "</STYLE", "\"<\\/STYLE\"") expectPrintedString(t, "</StYlE", "\"<\\/StYlE\"") expectPrintedString(t, ">/style", "\">/style\"") expectPrintedString(t, ">/STYLE", "\">/STYLE\"") expectPrintedString(t, 
">/StYlE", "\">/StYlE\"") } func TestURLQuote(t *testing.T) { expectPrinted(t, "* { background: url('foo') }", "* {\n background: url(foo);\n}\n") expectPrinted(t, "* { background: url('f o') }", "* {\n background: url(f\\ o);\n}\n") expectPrinted(t, "* { background: url('f o') }", "* {\n background: url(\"f o\");\n}\n") expectPrinted(t, "* { background: url('foo)') }", "* {\n background: url(foo\\));\n}\n") expectPrinted(t, "* { background: url('(foo') }", "* {\n background: url(\\(foo);\n}\n") expectPrinted(t, "* { background: url('(foo)') }", "* {\n background: url(\"(foo)\");\n}\n") expectPrinted(t, "* { background: url('\"foo\"') }", "* {\n background: url('\"foo\"');\n}\n") } func TestImportant(t *testing.T) { expectPrinted(t, "a { b: c!important }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c!important; }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c! important }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c! important; }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c ! important }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c ! 
important; }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c !IMPORTANT; }", "a {\n b: c !important;\n}\n") expectPrinted(t, "a { b: c !ImPoRtAnT; }", "a {\n b: c !important;\n}\n") expectPrintedMinify(t, "a { b: c !important }", "a{b:c!important}") } func TestSelector(t *testing.T) { expectPrintedMinify(t, "a + b c > d ~ e{}", "a+b c>d~e{}") expectPrinted(t, ":unknown( x (a+b), 'c' ) {}", ":unknown(x (a+b), \"c\") {\n}\n") expectPrinted(t, ":unknown( x (a-b), 'c' ) {}", ":unknown(x (a-b), \"c\") {\n}\n") expectPrinted(t, ":unknown( x (a,b), 'c' ) {}", ":unknown(x (a, b), \"c\") {\n}\n") expectPrinted(t, ":unknown( x ( a + b ), 'c' ) {}", ":unknown(x (a + b), \"c\") {\n}\n") expectPrinted(t, ":unknown( x ( a - b ), 'c' ) {}", ":unknown(x (a - b), \"c\") {\n}\n") expectPrinted(t, ":unknown( x ( a , b ), 'c' ) {}", ":unknown(x (a, b), \"c\") {\n}\n") expectPrintedMinify(t, ":unknown( x (a+b), 'c' ) {}", ":unknown(x (a+b),\"c\"){}") expectPrintedMinify(t, ":unknown( x (a-b), 'c' ) {}", ":unknown(x (a-b),\"c\"){}") expectPrintedMinify(t, ":unknown( x (a,b), 'c' ) {}", ":unknown(x (a,b),\"c\"){}") expectPrintedMinify(t, ":unknown( x ( a + b ), 'c' ) {}", ":unknown(x (a + b),\"c\"){}") expectPrintedMinify(t, ":unknown( x ( a - b ), 'c' ) {}", ":unknown(x (a - b),\"c\"){}") expectPrintedMinify(t, ":unknown( x ( a , b ), 'c' ) {}", ":unknown(x (a,b),\"c\"){}") } func TestNestedSelector(t *testing.T) { expectPrintedMinify(t, "a { &b {} }", "a{&b{}}") expectPrintedMinify(t, "a { & b {} }", "a{& b{}}") expectPrintedMinify(t, "a { & :b {} }", "a{& :b{}}") expectPrintedMinify(t, "& a & b & c {}", "& a & b & c{}") } func TestBadQualifiedRules(t *testing.T) { expectPrinted(t, ";", "; {\n}\n") expectPrinted(t, "$bad: rule;", "$bad: rule; {\n}\n") expectPrinted(t, "a {}; b {};", "a {\n}\n; b {\n}\n; {\n}\n") expectPrinted(t, "a { div.major { color: blue } color: red }", "a {\n div.major { color: blue } color: red;\n}\n") expectPrinted(t, "a { div:hover { color: blue } 
color: red }", "a {\n div: hover { color: blue } color: red;\n}\n") expectPrinted(t, "a { div:hover { color: blue }; color: red }", "a {\n div: hover { color: blue };\n color: red;\n}\n") expectPrinted(t, "$bad{ color: red }", "$bad {\n color: red;\n}\n") expectPrinted(t, "$bad { color: red }", "$bad {\n color: red;\n}\n") expectPrinted(t, "$bad foo{ color: red }", "$bad foo {\n color: red;\n}\n") expectPrinted(t, "$bad foo { color: red }", "$bad foo {\n color: red;\n}\n") expectPrintedMinify(t, "$bad{ color: red }", "$bad{color:red}") expectPrintedMinify(t, "$bad { color: red }", "$bad{color:red}") expectPrintedMinify(t, "$bad foo{ color: red }", "$bad foo{color:red}") expectPrintedMinify(t, "$bad foo { color: red }", "$bad foo{color:red}") } func TestDeclaration(t *testing.T) { expectPrinted(t, "* { unknown: x (a+b) }", "* {\n unknown: x (a+b);\n}\n") expectPrinted(t, "* { unknown: x (a-b) }", "* {\n unknown: x (a-b);\n}\n") expectPrinted(t, "* { unknown: x (a,b) }", "* {\n unknown: x (a, b);\n}\n") expectPrinted(t, "* { unknown: x ( a + b ) }", "* {\n unknown: x (a + b);\n}\n") expectPrinted(t, "* { unknown: x ( a - b ) }", "* {\n unknown: x (a - b);\n}\n") expectPrinted(t, "* { unknown: x ( a , b ) }", "* {\n unknown: x (a, b);\n}\n") expectPrintedMinify(t, "* { unknown: x (a+b) }", "*{unknown:x (a+b)}") expectPrintedMinify(t, "* { unknown: x (a-b) }", "*{unknown:x (a-b)}") expectPrintedMinify(t, "* { unknown: x (a,b) }", "*{unknown:x (a,b)}") expectPrintedMinify(t, "* { unknown: x ( a + b ) }", "*{unknown:x (a + b)}") expectPrintedMinify(t, "* { unknown: x ( a - b ) }", "*{unknown:x (a - b)}") expectPrintedMinify(t, "* { unknown: x ( a , b ) }", "*{unknown:x (a,b)}") // Pretty-print long lists in declarations expectPrinted(t, "a { b: c, d }", "a {\n b: c, d;\n}\n") expectPrinted(t, "a { b: c, (d, e) }", "a {\n b: c, (d, e);\n}\n") expectPrinted(t, "a { b: c, d, e }", "a {\n b:\n c,\n d,\n e;\n}\n") expectPrinted(t, "a { b: c, (d, e), f }", "a {\n b:\n c,\n (d, 
e),\n f;\n}\n") expectPrintedMinify(t, "a { b: c, d }", "a{b:c,d}") expectPrintedMinify(t, "a { b: c, (d, e) }", "a{b:c,(d,e)}") expectPrintedMinify(t, "a { b: c, d, e }", "a{b:c,d,e}") expectPrintedMinify(t, "a { b: c, (d, e), f }", "a{b:c,(d,e),f}") } func TestVerbatimWhitespace(t *testing.T) { expectPrinted(t, "*{--x:}", "* {\n --x:;\n}\n") expectPrinted(t, "*{--x: }", "* {\n --x: ;\n}\n") expectPrinted(t, "* { --x:; }", "* {\n --x:;\n}\n") expectPrinted(t, "* { --x: ; }", "* {\n --x: ;\n}\n") expectPrintedMinify(t, "*{--x:}", "*{--x:}") expectPrintedMinify(t, "*{--x: }", "*{--x: }") expectPrintedMinify(t, "* { --x:; }", "*{--x:}") expectPrintedMinify(t, "* { --x: ; }", "*{--x: }") expectPrinted(t, "*{--x:!important}", "* {\n --x:!important;\n}\n") expectPrinted(t, "*{--x: !important}", "* {\n --x: !important;\n}\n") expectPrinted(t, "*{ --x:!important }", "* {\n --x:!important;\n}\n") expectPrinted(t, "*{ --x: !important }", "* {\n --x: !important;\n}\n") expectPrinted(t, "* { --x:!important; }", "* {\n --x:!important;\n}\n") expectPrinted(t, "* { --x: !important; }", "* {\n --x: !important;\n}\n") expectPrinted(t, "* { --x:! important ; }", "* {\n --x:!important;\n}\n") expectPrinted(t, "* { --x: ! important ; }", "* {\n --x: !important;\n}\n") expectPrintedMinify(t, "*{--x:!important}", "*{--x:!important}") expectPrintedMinify(t, "*{--x: !important}", "*{--x: !important}") expectPrintedMinify(t, "*{ --x:!important }", "*{--x:!important}") expectPrintedMinify(t, "*{ --x: !important }", "*{--x: !important}") expectPrintedMinify(t, "* { --x:!important; }", "*{--x:!important}") expectPrintedMinify(t, "* { --x: !important; }", "*{--x: !important}") expectPrintedMinify(t, "* { --x:! important ; }", "*{--x:!important}") expectPrintedMinify(t, "* { --x: ! 
important ; }", "*{--x: !important}") expectPrinted(t, "* { --x:y; }", "* {\n --x:y;\n}\n") expectPrinted(t, "* { --x: y; }", "* {\n --x: y;\n}\n") expectPrinted(t, "* { --x:y ; }", "* {\n --x:y ;\n}\n") expectPrinted(t, "* { --x:y, ; }", "* {\n --x:y, ;\n}\n") expectPrinted(t, "* { --x: var(y,); }", "* {\n --x: var(y,);\n}\n") expectPrinted(t, "* { --x: var(y, ); }", "* {\n --x: var(y, );\n}\n") expectPrintedMinify(t, "* { --x:y; }", "*{--x:y}") expectPrintedMinify(t, "* { --x: y; }", "*{--x: y}") expectPrintedMinify(t, "* { --x:y ; }", "*{--x:y }") expectPrintedMinify(t, "* { --x:y, ; }", "*{--x:y, }") expectPrintedMinify(t, "* { --x: var(y,); }", "*{--x: var(y,)}") expectPrintedMinify(t, "* { --x: var(y, ); }", "*{--x: var(y, )}") expectPrinted(t, "* { --x:(y); }", "* {\n --x:(y);\n}\n") expectPrinted(t, "* { --x:(y) ; }", "* {\n --x:(y) ;\n}\n") expectPrinted(t, "* { --x: (y); }", "* {\n --x: (y);\n}\n") expectPrinted(t, "* { --x:(y ); }", "* {\n --x:(y );\n}\n") expectPrinted(t, "* { --x:( y); }", "* {\n --x:( y);\n}\n") expectPrintedMinify(t, "* { --x:(y); }", "*{--x:(y)}") expectPrintedMinify(t, "* { --x:(y) ; }", "*{--x:(y) }") expectPrintedMinify(t, "* { --x: (y); }", "*{--x: (y)}") expectPrintedMinify(t, "* { --x:(y ); }", "*{--x:(y )}") expectPrintedMinify(t, "* { --x:( y); }", "*{--x:( y)}") expectPrinted(t, "* { --x:f(y); }", "* {\n --x:f(y);\n}\n") expectPrinted(t, "* { --x:f(y) ; }", "* {\n --x:f(y) ;\n}\n") expectPrinted(t, "* { --x: f(y); }", "* {\n --x: f(y);\n}\n") expectPrinted(t, "* { --x:f(y ); }", "* {\n --x:f(y );\n}\n") expectPrinted(t, "* { --x:f( y); }", "* {\n --x:f( y);\n}\n") expectPrintedMinify(t, "* { --x:f(y); }", "*{--x:f(y)}") expectPrintedMinify(t, "* { --x:f(y) ; }", "*{--x:f(y) }") expectPrintedMinify(t, "* { --x: f(y); }", "*{--x: f(y)}") expectPrintedMinify(t, "* { --x:f(y ); }", "*{--x:f(y )}") expectPrintedMinify(t, "* { --x:f( y); }", "*{--x:f( y)}") expectPrinted(t, "* { --x:[y]; }", "* {\n --x:[y];\n}\n") 
expectPrinted(t, "* { --x:[y] ; }", "* {\n --x:[y] ;\n}\n") expectPrinted(t, "* { --x: [y]; }", "* {\n --x: [y];\n}\n") expectPrinted(t, "* { --x:[y ]; }", "* {\n --x:[y ];\n}\n") expectPrinted(t, "* { --x:[ y]; }", "* {\n --x:[ y];\n}\n") expectPrintedMinify(t, "* { --x:[y]; }", "*{--x:[y]}") expectPrintedMinify(t, "* { --x:[y] ; }", "*{--x:[y] }") expectPrintedMinify(t, "* { --x: [y]; }", "*{--x: [y]}") expectPrintedMinify(t, "* { --x:[y ]; }", "*{--x:[y ]}") expectPrintedMinify(t, "* { --x:[ y]; }", "*{--x:[ y]}") expectPrinted(t, "* { --x:{y}; }", "* {\n --x:{y};\n}\n") expectPrinted(t, "* { --x:{y} ; }", "* {\n --x:{y} ;\n}\n") expectPrinted(t, "* { --x: {y}; }", "* {\n --x: {y};\n}\n") expectPrinted(t, "* { --x:{y }; }", "* {\n --x:{y };\n}\n") expectPrinted(t, "* { --x:{ y}; }", "* {\n --x:{ y};\n}\n") expectPrintedMinify(t, "* { --x:{y}; }", "*{--x:{y}}") expectPrintedMinify(t, "* { --x:{y} ; }", "*{--x:{y} }") expectPrintedMinify(t, "* { --x: {y}; }", "*{--x: {y}}") expectPrintedMinify(t, "* { --x:{y }; }", "*{--x:{y }}") expectPrintedMinify(t, "* { --x:{ y}; }", "*{--x:{ y}}") expectPrintedMinify(t, "@supports ( --x : y , z ) { a { color: red; } }", "@supports ( --x : y , z ){a{color:red}}") expectPrintedMinify(t, "@supports ( --x : ) { a { color: red; } }", "@supports ( --x : ){a{color:red}}") expectPrintedMinify(t, "@supports (--x: ) { a { color: red; } }", "@supports (--x: ){a{color:red}}") expectPrintedMinify(t, "@supports ( --x y , z ) { a { color: red; } }", "@supports (--x y,z){a{color:red}}") expectPrintedMinify(t, "@supports ( --x ) { a { color: red; } }", "@supports (--x){a{color:red}}") expectPrintedMinify(t, "@supports ( ) { a { color: red; } }", "@supports (){a{color:red}}") expectPrintedMinify(t, "@supports ( . --x : y , z ) { a { color: red; } }", "@supports (. 
--x : y,z){a{color:red}}") } func TestAtRule(t *testing.T) { expectPrintedMinify(t, "@unknown;", "@unknown;") expectPrintedMinify(t, "@unknown x;", "@unknown x;") expectPrintedMinify(t, "@unknown{}", "@unknown{}") expectPrintedMinify(t, "@unknown{\na: b;\nc: d;\n}", "@unknown{a: b; c: d;}") expectPrinted(t, "@unknown x{}", "@unknown x {}\n") expectPrinted(t, "@unknown x {}", "@unknown x {}\n") expectPrintedMinify(t, "@unknown x{}", "@unknown x{}") expectPrintedMinify(t, "@unknown x {}", "@unknown x{}") expectPrinted(t, "@unknown x ( a + b ) ;", "@unknown x (a + b);\n") expectPrinted(t, "@unknown x ( a - b ) ;", "@unknown x (a - b);\n") expectPrinted(t, "@unknown x ( a , b ) ;", "@unknown x (a, b);\n") expectPrintedMinify(t, "@unknown x ( a + b ) ;", "@unknown x (a + b);") expectPrintedMinify(t, "@unknown x ( a - b ) ;", "@unknown x (a - b);") expectPrintedMinify(t, "@unknown x ( a , b ) ;", "@unknown x (a,b);") } func TestAtCharset(t *testing.T) { expectPrinted(t, "@charset \"UTF-8\";", "@charset \"UTF-8\";\n") expectPrintedMinify(t, "@charset \"UTF-8\";", "@charset \"UTF-8\";") } func TestAtImport(t *testing.T) { expectPrinted(t, "@import\"foo.css\";", "@import \"foo.css\";\n") expectPrinted(t, "@import \"foo.css\";", "@import \"foo.css\";\n") expectPrinted(t, "@import url(foo.css);", "@import \"foo.css\";\n") expectPrinted(t, "@import url(\"foo.css\");", "@import \"foo.css\";\n") expectPrinted(t, "@import url(\"foo.css\") print;", "@import \"foo.css\" print;\n") expectPrintedMinify(t, "@import\"foo.css\";", "@import\"foo.css\";") expectPrintedMinify(t, "@import \"foo.css\";", "@import\"foo.css\";") expectPrintedMinify(t, "@import url(foo.css);", "@import\"foo.css\";") expectPrintedMinify(t, "@import url(\"foo.css\");", "@import\"foo.css\";") expectPrintedMinify(t, "@import url(\"foo.css\") print;", "@import\"foo.css\"print;") } func TestAtKeyframes(t *testing.T) { expectPrintedMinify(t, "@keyframes name { 0%, 50% { color: red } 25%, 75% { color: blue } }", 
"@keyframes name{0%,50%{color:red}25%,75%{color:blue}}") expectPrintedMinify(t, "@keyframes name { from { color: red } to { color: blue } }", "@keyframes name{from{color:red}to{color:blue}}") } func TestAtMedia(t *testing.T) { expectPrinted(t, "@media screen { div { color: red } }", "@media screen {\n div {\n color: red;\n }\n}\n") expectPrinted(t, "@media screen{div{color:red}}", "@media screen {\n div {\n color: red;\n }\n}\n") expectPrintedMinify(t, "@media screen { div { color: red } }", "@media screen{div{color:red}}") expectPrintedMinify(t, "@media screen{div{color:red}}", "@media screen{div{color:red}}") } func TestAtFontFace(t *testing.T) { expectPrinted(t, "@font-face { font-family: 'Open Sans'; src: url('OpenSans.woff') format('woff') }", "@font-face {\n font-family: \"Open Sans\";\n src: url(OpenSans.woff) format(\"woff\");\n}\n") expectPrintedMinify(t, "@font-face { font-family: 'Open Sans'; src: url('OpenSans.woff') format('woff') }", "@font-face{font-family:\"Open Sans\";src:url(OpenSans.woff) format(\"woff\")}") } func TestAtPage(t *testing.T) { expectPrinted(t, "@page { margin: 1cm }", "@page {\n margin: 1cm;\n}\n") expectPrinted(t, "@page :first { margin: 1cm }", "@page :first {\n margin: 1cm;\n}\n") expectPrintedMinify(t, "@page { margin: 1cm }", "@page{margin:1cm}") expectPrintedMinify(t, "@page :first { margin: 1cm }", "@page :first{margin:1cm}") } func TestAtStartingStyle(t *testing.T) { expectPrinted(t, "@starting-style { div { color: red } }", "@starting-style {\n div {\n color: red;\n }\n}\n") expectPrinted(t, "@starting-style{div{color:red}}", "@starting-style {\n div {\n color: red;\n }\n}\n") expectPrintedMinify(t, "@starting-style { div { color: red } }", "@starting-style{div{color:red}}") expectPrintedMinify(t, "@starting-style{div{color:red}}", "@starting-style{div{color:red}}") } func TestMsGridColumnsWhitespace(t *testing.T) { // Must not insert a space between the "]" and the "(" expectPrinted(t, "div { -ms-grid-columns: (1fr)[3] 
}", "div {\n -ms-grid-columns: (1fr)[3];\n}\n") expectPrinted(t, "div { -ms-grid-columns: 1fr (20px 1fr)[3] }", "div {\n -ms-grid-columns: 1fr (20px 1fr)[3];\n}\n") expectPrintedMinify(t, "div { -ms-grid-columns: (1fr)[3] }", "div{-ms-grid-columns:(1fr)[3]}") expectPrintedMinify(t, "div { -ms-grid-columns: 1fr (20px 1fr)[3] }", "div{-ms-grid-columns:1fr (20px 1fr)[3]}") } func TestASCII(t *testing.T) { expectPrintedASCII(t, "* { background: url(🐈) }", "* {\n background: url(\\1f408);\n}\n") expectPrintedASCII(t, "* { background: url(🐈6) }", "* {\n background: url(\\1f408 6);\n}\n") expectPrintedASCII(t, "* { background: url('🐈') }", "* {\n background: url(\\1f408);\n}\n") expectPrintedASCII(t, "* { background: url('🐈6') }", "* {\n background: url(\\1f408 6);\n}\n") expectPrintedASCII(t, "* { background: url('(🐈)') }", "* {\n background: url(\"(\\1f408)\");\n}\n") expectPrintedASCII(t, "* { background: url('(🐈6)') }", "* {\n background: url(\"(\\1f408 6)\");\n}\n") expectPrintedASCII(t, "div { 🐈: 🐈('🐈') }", "div {\n \\1f408: \\1f408(\"\\1f408\");\n}\n") expectPrintedASCII(t, "div { 🐈 : 🐈 ('🐈 ') }", "div {\n \\1f408: \\1f408 (\"\\1f408 \");\n}\n") expectPrintedASCII(t, "div { 🐈6: 🐈6('🐈6') }", "div {\n \\1f408 6: \\1f408 6(\"\\1f408 6\");\n}\n") expectPrintedASCII(t, "@🐈;", "@\\1f408;\n") expectPrintedASCII(t, "@🐈 {}", "@\\1f408 {}\n") expectPrintedASCII(t, "@🐈 x {}", "@\\1f408 x {}\n") expectPrintedASCII(t, "#🐈#x {}", "#\\1f408#x {\n}\n") expectPrintedASCII(t, "#🐈 #x {}", "#\\1f408 #x {\n}\n") expectPrintedASCII(t, "#🐈::x {}", "#\\1f408::x {\n}\n") expectPrintedASCII(t, "#🐈 ::x {}", "#\\1f408 ::x {\n}\n") expectPrintedASCII(t, ".🐈.x {}", ".\\1f408.x {\n}\n") expectPrintedASCII(t, ".🐈 .x {}", ".\\1f408 .x {\n}\n") expectPrintedASCII(t, ".🐈::x {}", ".\\1f408::x {\n}\n") expectPrintedASCII(t, ".🐈 ::x {}", ".\\1f408 ::x {\n}\n") expectPrintedASCII(t, "🐈|🐈.x {}", "\\1f408|\\1f408.x {\n}\n") expectPrintedASCII(t, "🐈|🐈 .x {}", "\\1f408|\\1f408 .x {\n}\n") 
expectPrintedASCII(t, "🐈|🐈::x {}", "\\1f408|\\1f408::x {\n}\n") expectPrintedASCII(t, "🐈|🐈 ::x {}", "\\1f408|\\1f408 ::x {\n}\n") expectPrintedASCII(t, "::🐈:x {}", "::\\1f408:x {\n}\n") expectPrintedASCII(t, "::🐈 :x {}", "::\\1f408 :x {\n}\n") expectPrintedASCII(t, "[🐈] {}", "[\\1f408] {\n}\n") expectPrintedASCII(t, "[🐈=🐈] {}", "[\\1f408=\\1f408] {\n}\n") expectPrintedASCII(t, "[🐈|🐈=🐈] {}", "[\\1f408|\\1f408=\\1f408] {\n}\n") // A space must be consumed after an escaped code point even with six digits expectPrintedASCII(t, ".\\10FFF abc:after { content: '\\10FFF abc' }", ".\\10fff abc:after {\n content: \"\\10fff abc\";\n}\n") expectPrintedASCII(t, ".\U00010FFFabc:after { content: '\U00010FFFabc' }", ".\\10fff abc:after {\n content: \"\\10fff abc\";\n}\n") expectPrintedASCII(t, ".\\10FFFFabc:after { content: '\\10FFFFabc' }", ".\\10ffffabc:after {\n content: \"\\10ffffabc\";\n}\n") expectPrintedASCII(t, ".\\10FFFF abc:after { content: '\\10FFFF abc' }", ".\\10ffffabc:after {\n content: \"\\10ffffabc\";\n}\n") expectPrintedASCII(t, ".\U0010FFFFabc:after { content: '\U0010FFFFabc' }", ".\\10ffffabc:after {\n content: \"\\10ffffabc\";\n}\n") // This character should always be escaped expectPrinted(t, ".\\FEFF:after { content: '\uFEFF' }", ".\\feff:after {\n content: \"\\feff\";\n}\n") } ================================================ FILE: lib/esbuild/esbuild.go ================================================ package esbuild ================================================ FILE: lib/esbuild/helpers/bitset.go ================================================ package helpers import "bytes" type BitSet struct { entries []byte } func NewBitSet(bitCount uint) BitSet { return BitSet{make([]byte, (bitCount+7)/8)} } func (bs BitSet) HasBit(bit uint) bool { return (bs.entries[bit/8] & (1 << (bit & 7))) != 0 } func (bs BitSet) SetBit(bit uint) { bs.entries[bit/8] |= 1 << (bit & 7) } func (bs BitSet) Equals(other BitSet) bool { return bytes.Equal(bs.entries, other.entries) } 
// String returns the bit set's underlying storage bytes reinterpreted as a
// string (a compact representation of the set's exact contents).
func (bs BitSet) String() string { return string(bs.entries) }

================================================
FILE: lib/esbuild/helpers/comment.go
================================================
package helpers

import (
	"strings"
	"unicode/utf8"
)

// RemoveMultiLineCommentIndent strips the common leading indentation from the
// continuation lines of a multi-line comment body "text". "prefix" is the
// source text immediately preceding the comment: the number of runes between
// the last newline in "prefix" and the comment start seeds the indent
// estimate, which is then reduced to the minimum indent actually present on
// the lines after the first. Lines are re-joined with "\n".
func RemoveMultiLineCommentIndent(prefix string, text string) string {
	// Figure out the initial indent
	indent := 0
seekBackwardToNewline:
	for len(prefix) > 0 {
		c, size := utf8.DecodeLastRuneInString(prefix)
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			break seekBackwardToNewline
		}
		prefix = prefix[:len(prefix)-size]
		indent++
	}

	// Split the comment into lines
	var lines []string
	start := 0
	for i, c := range text {
		switch c {
		case '\r', '\n':
			// Don't double-append for Windows style "\r\n" newlines
			if start <= i {
				lines = append(lines, text[start:i])
			}
			start = i + 1
			// Ignore the second part of Windows style "\r\n" newlines
			if c == '\r' && start < len(text) && text[start] == '\n' {
				start++
			}
		case '\u2028', '\u2029':
			// U+2028/U+2029 are 3 bytes in UTF-8, so skip 3 bytes
			lines = append(lines, text[start:i])
			start = i + 3
		}
	}
	lines = append(lines, text[start:])

	// Find the minimum indent over all lines after the first line
	for _, line := range lines[1:] {
		lineIndent := 0
		for _, c := range line {
			if c != ' ' && c != '\t' {
				break
			}
			lineIndent++
		}
		if indent > lineIndent {
			indent = lineIndent
		}
	}

	// Trim the indent off of all lines after the first line
	for i, line := range lines {
		if i > 0 {
			lines[i] = line[indent:]
		}
	}
	return strings.Join(lines, "\n")
}

// EscapeClosingTag inserts a backslash between '<' and '/' for every "</"
// occurrence in "text" that is followed (case-insensitively) by "slashTag",
// so the text cannot prematurely close that tag when embedded in markup.
// NOTE(review): the comparison starts at the '/', so slashTag is presumably
// passed with its leading slash (e.g. "/script") — confirm at call sites.
func EscapeClosingTag(text string, slashTag string) string {
	i := strings.Index(text, "</")
	if i < 0 {
		return text
	}
	var b strings.Builder
	for {
		b.WriteString(text[:i+1])
		text = text[i+1:]
		if len(text) >= len(slashTag) && strings.EqualFold(text[:len(slashTag)], slashTag) {
			b.WriteByte('\\')
		}
		i = strings.Index(text, "</")
		if i < 0 {
			break
		}
	}
	b.WriteString(text)
	return b.String()
}

================================================
FILE: lib/esbuild/helpers/hash.go
================================================
package helpers

// HashCombine mixes "hash" into "seed", producing a new seed.
// From: http://boost.sourceforge.net/doc/html/boost/hash_combine.html
func HashCombine(seed uint32, hash uint32) uint32 {
	return seed ^ (hash + 0x9e3779b9 + (seed << 6) + (seed >> 2))
}

// HashCombineString folds a string into "seed" by combining its length and
// then each rune in order.
func HashCombineString(seed uint32, text string) uint32 {
	seed = HashCombine(seed, uint32(len(text)))
	for _, c := range text {
		seed = HashCombine(seed, uint32(c))
	}
	return seed
}

================================================
FILE: lib/esbuild/helpers/joiner.go
================================================
package helpers

import (
	"bytes"
	"strings"
)

// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	strings  []joinerString
	bytes    []joinerBytes
	length   uint32
	lastByte byte
}

// joinerString is a string chunk plus its byte offset in the final output.
type joinerString struct {
	data   string
	offset uint32
}

// joinerBytes is a byte-slice chunk plus its byte offset in the final output.
type joinerBytes struct {
	data   []byte
	offset uint32
}

// AddString appends a string chunk at the current end of the joined output.
func (j *Joiner) AddString(data string) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

// AddBytes appends a byte-slice chunk at the current end of the joined
// output. The slice is retained (not copied) until Done runs.
func (j *Joiner) AddBytes(data []byte) {
	if len(data) > 0 {
		j.lastByte = data[len(data)-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

// LastByte returns the final byte of the most recently added non-empty chunk.
func (j *Joiner) LastByte() byte {
	return j.lastByte
}

// Length returns the total number of bytes added so far.
func (j *Joiner) Length() uint32 {
	return j.length
}

// EnsureNewlineAtEnd appends "\n" unless the output is empty or already ends
// in a newline.
func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

// Done materializes the joined output with at most a single allocation.
func (j *Joiner) Done() []byte {
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		// No need to allocate if there was only a single byte array written
		return j.bytes[0].data
	}
	buffer := make([]byte, j.length)
	for _, item := range j.strings {
		copy(buffer[item.offset:], item.data)
	}
	for _, item := range j.bytes {
		copy(buffer[item.offset:], item.data)
	}
	return buffer
}

// Contains reports whether any individual chunk contains s (string chunks)
// or b (byte chunks). NOTE(review): a match straddling two adjacent chunks is
// not detected — confirm callers only probe for markers that always arrive
// within one chunk.
func (j *Joiner) Contains(s string, b []byte) bool {
	for _, item := range j.strings {
		if strings.Contains(item.data, s) {
			return true
		}
	}
	for _, item := range j.bytes {
		if bytes.Contains(item.data, b) {
			return true
		}
	}
	return false
}

================================================
FILE: lib/esbuild/helpers/mime.go
================================================
package helpers

import "strings"

// builtinTypesLower maps lower-case file extensions to their MIME types.
var builtinTypesLower = map[string]string{
	".css":  "text/css; charset=utf-8",
	".gif":  "image/gif",
	".htm":  "text/html; charset=utf-8",
	".html": "text/html; charset=utf-8",
	".jpeg": "image/jpeg",
	".jpg":  "image/jpeg",
	".js":   "text/javascript; charset=utf-8",
	".json": "application/json",
	".mjs":  "text/javascript; charset=utf-8",
	".pdf":  "application/pdf",
	".png":  "image/png",
	".svg":  "image/svg+xml",
	".wasm": "application/wasm",
	".webp": "image/webp",
	".xml":  "text/xml; charset=utf-8",
}

// This is used instead of Go's built-in "mime.TypeByExtension" function because
// that function is broken on Windows: https://github.com/golang/go/issues/32350.
// Returns "" for unknown extensions; falls back to a case-insensitive lookup.
func MimeTypeByExtension(ext string) string {
	contentType := builtinTypesLower[ext]
	if contentType == "" {
		contentType = builtinTypesLower[strings.ToLower(ext)]
	}
	return contentType
}

================================================
FILE: lib/esbuild/helpers/path.go
================================================
package helpers

import "strings"

// IsInsideNodeModules reports whether any path component (walking from the
// end toward the root, accepting both '/' and '\\' separators) is exactly
// "node_modules".
func IsInsideNodeModules(path string) bool {
	for {
		// This is written in a platform-independent manner because it's run on
		// user-specified paths which can be arbitrary non-file-system things. So
		// for example Windows paths may end up being used on Unix or URLs may end
		// up being used on Windows. Be consistently agnostic to which kind of
		// slash is used on all platforms.
		slash := strings.LastIndexAny(path, "/\\")
		if slash == -1 {
			return false
		}
		dir, base := path[:slash], path[slash+1:]
		if base == "node_modules" {
			return true
		}
		path = dir
	}
}

================================================
FILE: lib/esbuild/helpers/serializer.go
================================================
package helpers

import "sync"

// Each call to "Enter(i)" doesn't start until "Leave(i-1)" is called
type Serializer struct {
	flags []sync.WaitGroup
}

// MakeSerializer creates a serializer for "count" ordered stages; each
// stage's wait group starts with a count of one.
func MakeSerializer(count int) Serializer {
	flags := make([]sync.WaitGroup, count)
	for i := 0; i < count; i++ {
		flags[i].Add(1)
	}
	return Serializer{flags: flags}
}

// Enter blocks stage i until stage i-1 has called Leave (stage 0 never blocks).
func (s *Serializer) Enter(i int) {
	if i > 0 {
		s.flags[i-1].Wait()
	}
}

// Leave marks stage i as finished, releasing any Enter(i+1) waiter.
func (s *Serializer) Leave(i int) {
	s.flags[i].Done()
}

================================================
FILE: lib/esbuild/helpers/stack.go
================================================
package helpers

import (
	"runtime/debug"
	"strings"
)

// PrettyPrintedStack reformats runtime/debug.Stack() into compact
// "funcName (file:line)" entries: the leading "goroutine ..." header is
// dropped, argument lists and "+0x..." offsets are stripped, and package
// paths are shortened to their last component.
func PrettyPrintedStack() string {
	lines := strings.Split(strings.TrimSpace(string(debug.Stack())), "\n")

	// Strip the first "goroutine" line
	if len(lines) > 0 {
		if first := lines[0]; strings.HasPrefix(first, "goroutine ") && strings.HasSuffix(first, ":") {
			lines = lines[1:]
		}
	}

	sb := strings.Builder{}
	for _, line := range lines {
		// Indented lines are source locations
		if strings.HasPrefix(line, "\t") {
			line = line[1:]
			line = strings.TrimPrefix(line, "github.com/evanw/esbuild/")
			if offset := strings.LastIndex(line, " +0x"); offset != -1 {
				line = line[:offset]
			}
			sb.WriteString(" (")
			sb.WriteString(line)
			sb.WriteString(")")
			continue
		}

		// Other lines are function calls
		if sb.Len() > 0 {
			sb.WriteByte('\n')
		}
		if strings.HasSuffix(line, ")") {
			if paren := strings.LastIndexByte(line, '('); paren != -1 {
				line = line[:paren]
			}
		}
		if slash := strings.LastIndexByte(line, '/'); slash != -1 {
			line = line[slash+1:]
		}
		sb.WriteString(line)
	}
	return sb.String()
}

================================================
FILE: lib/esbuild/helpers/timer.go
================================================
package helpers

import (
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/withastro/compiler/lib/esbuild/logger"
)

// Timer records named, possibly nested begin/end events so the time spent in
// each phase can be reported later via Log. A nil *Timer is valid: every
// method is a no-op on it, which lets timing be disabled by passing nil.
type Timer struct {
	data  []timerData
	mutex sync.Mutex
}

// timerData is one begin (isEnd == false) or end (isEnd == true) event.
type timerData struct {
	time  time.Time
	name  string
	isEnd bool
}

// Begin records the start of the named phase.
func (t *Timer) Begin(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name: name,
			time: time.Now(),
		})
	}
}

// End records the end of the named phase; it must pair with an earlier Begin
// of the same name (Log panics otherwise).
func (t *Timer) End(name string) {
	if t != nil {
		t.data = append(t.data, timerData{
			name:  name,
			time:  time.Now(),
			isEnd: true,
		})
	}
}

// Fork returns a fresh Timer for concurrent work (or nil if t is nil), to be
// merged back later with Join.
func (t *Timer) Fork() *Timer {
	if t != nil {
		return &Timer{}
	}
	return nil
}

// Join appends another timer's events onto t under t's mutex.
func (t *Timer) Join(other *Timer) {
	if t != nil && other != nil {
		t.mutex.Lock()
		defer t.mutex.Unlock()
		t.data = append(t.data, other.data...)
	}
}

// Log replays the recorded begin/end events, matching each end with the most
// recent unmatched begin, and emits one indented "name: Nms" note per phase.
// Panics on a mismatched end name.
func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			top := pair{timerData: item, index: uint32(len(notes))}
			// Reserve a note slot now so output keeps begin order
			notes = append(notes, logger.MsgData{DisableMaximumWidth: true})
			stack = append(stack, top)
			indent++
		} else {
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat(" ", indent), top.name, item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddWithNotes(logger.Info, nil, logger.Range{},
		"Timing information (times may not nest hierarchically due to parallelism)", notes)
}

================================================
FILE: lib/esbuild/helpers/typos.go
================================================
package helpers

import "unicode/utf8"

// TypoDetector suggests corrections for names that are one character edit
// away from a known-valid name.
type TypoDetector struct {
	// oneCharTypos maps each valid word with one rune removed back to the
	// original word.
	oneCharTypos map[string]string
}

// MakeTypoDetector builds a detector from the valid names. Words of length
// <= 3 are skipped to avoid spurious matches on very short names.
func MakeTypoDetector(valid []string) TypoDetector {
	detector := TypoDetector{oneCharTypos: make(map[string]string)}

	// Add all combinations of each valid word with one character missing
	for _, correct := range valid {
		if len(correct) > 3 {
			for i, ch := range correct {
				detector.oneCharTypos[correct[:i]+correct[i+utf8.RuneLen(ch):]] = correct
			}
		}
	}

	return detector
}

// MaybeCorrectTypo returns (correction, true) when "typo" is within a single
// character edit of a valid word, and ("", false) otherwise.
func (detector TypoDetector) MaybeCorrectTypo(typo string) (string, bool) {
	// Check for a single deleted character
	if corrected, ok := detector.oneCharTypos[typo]; ok {
		return corrected, true
	}

	// Check for a single misplaced character
	for i, ch := range typo {
		if corrected, ok := detector.oneCharTypos[typo[:i]+typo[i+utf8.RuneLen(ch):]]; ok {
			return corrected, true
		}
	}

	return "", false
}

================================================
FILE: lib/esbuild/helpers/utf.go
================================================
package helpers

import (
	"strings"
	"unicode/utf8"
)

// ContainsNonBMPCodePoint reports whether text contains a code point above
// U+FFFF (i.e. outside the Basic Multilingual Plane).
func ContainsNonBMPCodePoint(text string) bool {
	for _, c := range text {
		if c > 0xFFFF {
			return true
		}
	}
	return false
}

// This does "ContainsNonBMPCodePoint(UTF16ToString(text))" without any allocations
func ContainsNonBMPCodePointUTF16(text []uint16) bool {
	if n := len(text); n > 0 {
		for i, c := range text[:n-1] {
			// Check for a high surrogate
			if c >= 0xD800 && c <= 0xDBFF {
				// Check for a low surrogate
				if c2 := text[i+1]; c2 >= 0xDC00 && c2 <= 0xDFFF {
					return true
				}
			}
		}
	}
	return false
}

// StringToUTF16 converts a string to UTF-16 code units, encoding non-BMP
// code points as surrogate pairs.
func StringToUTF16(text string) []uint16 {
	decoded := make([]uint16, 0, len(text))
	for _, c := range text {
		if c <= 0xFFFF {
			decoded = append(decoded, uint16(c))
		} else {
			c -= 0x10000
			decoded = append(decoded, uint16(0xD800+((c>>10)&0x3FF)), uint16(0xDC00+(c&0x3FF)))
		}
	}
	return decoded
}

// UTF16ToString converts UTF-16 code units to a string, pairing surrogates
// where possible. Output is WTF-8 (via encodeWTF8Rune), so unpaired
// surrogates are preserved rather than replaced.
func UTF16ToString(text []uint16) string {
	var temp [utf8.UTFMax]byte
	b := strings.Builder{}
	n := len(text)
	for i := 0; i < n; i++ {
		r1 := rune(text[i])
		if r1 >= 0xD800 && r1 <= 0xDBFF && i+1 < n {
			if r2 := rune(text[i+1]); r2 >= 0xDC00 && r2 <= 0xDFFF {
				r1 = (r1-0xD800)<<10 | (r2 - 0xDC00) + 0x10000
				i++
			}
		}
		width := encodeWTF8Rune(temp[:], r1)
		b.Write(temp[:width])
	}
	return b.String()
}

// UTF16ToStringWithValidation is like UTF16ToString but rejects unpaired
// surrogates, returning ("", offending code unit, false) on failure.
func UTF16ToStringWithValidation(text []uint16) (string, uint16, bool) {
	var temp [utf8.UTFMax]byte
	b := strings.Builder{}
	n := len(text)
	for i := 0; i < n; i++ {
		r1 := rune(text[i])
		if r1 >= 0xD800 && r1 <= 0xDBFF {
			if i+1 < n {
				if r2 := rune(text[i+1]); r2 >= 0xDC00 && r2 <= 0xDFFF {
					r1 = (r1-0xD800)<<10 | (r2 - 0xDC00) + 0x10000
					i++
				} else {
					// High surrogate not followed by a low surrogate
					return "", uint16(r1), false
				}
			} else {
				// High surrogate at end of input
				return "", uint16(r1), false
			}
		} else if r1 >= 0xDC00 && r1 <= 0xDFFF {
			// Low surrogate without a preceding high surrogate
			return "", uint16(r1), false
		}
		width := encodeWTF8Rune(temp[:], r1)
		b.Write(temp[:width])
	}
	return b.String(), 0, true
}

// Does "UTF16ToString(text) == str" without a temporary allocation
func UTF16EqualsString(text []uint16, str string) bool {
	if len(text) > len(str) {
		// Strings can't be equal if UTF-16 encoding is longer than UTF-8 encoding
		return false
	}
	var temp [utf8.UTFMax]byte
	n := len(text)
	j := 0
	for i := 0; i < n; i++ {
		r1 := rune(text[i])
		if r1 >= 0xD800 && r1 <= 0xDBFF && i+1 < n {
			if r2 := rune(text[i+1]); r2 >= 0xDC00 && r2 <= 0xDFFF {
				r1 = (r1-0xD800)<<10 | (r2 - 0xDC00) + 0x10000
				i++
			}
		}
		width := encodeWTF8Rune(temp[:], r1)
		if j+width > len(str) {
			return false
		}
		for k := 0; k < width; k++ {
			if temp[k] != str[j] {
				return false
			}
			j++
		}
	}
	return j == len(str)
}

// UTF16EqualsUTF16 reports whether two UTF-16 code unit slices are identical.
func UTF16EqualsUTF16(a []uint16, b []uint16) bool {
	if len(a) == len(b) {
		for i, c := range a {
			if c != b[i] {
				return false
			}
		}
		return true
	}
	return false
}

// This is a clone of "utf8.EncodeRune" that has been modified to encode using
// WTF-8 instead. See https://simonsapin.github.io/wtf-8/ for more info.
func encodeWTF8Rune(p []byte, r rune) int {
	// Negative values are erroneous. Making it unsigned addresses the problem.
	switch i := uint32(r); {
	case i <= 0x7F:
		p[0] = byte(r)
		return 1
	case i <= 0x7FF:
		_ = p[1] // eliminate bounds checks
		p[0] = 0xC0 | byte(r>>6)
		p[1] = 0x80 | byte(r)&0x3F
		return 2
	case i > utf8.MaxRune:
		r = utf8.RuneError
		fallthrough
	case i <= 0xFFFF:
		_ = p[2] // eliminate bounds checks
		p[0] = 0xE0 | byte(r>>12)
		p[1] = 0x80 | byte(r>>6)&0x3F
		p[2] = 0x80 | byte(r)&0x3F
		return 3
	default:
		_ = p[3] // eliminate bounds checks
		p[0] = 0xF0 | byte(r>>18)
		p[1] = 0x80 | byte(r>>12)&0x3F
		p[2] = 0x80 | byte(r>>6)&0x3F
		p[3] = 0x80 | byte(r)&0x3F
		return 4
	}
}

// This is a clone of "utf8.DecodeRuneInString" that has been modified to
// decode using WTF-8 instead. See https://simonsapin.github.io/wtf-8/ for
// more info.
func DecodeWTF8Rune(s string) (rune, int) {
	n := len(s)
	if n < 1 {
		return utf8.RuneError, 0
	}

	s0 := s[0]
	if s0 < 0x80 {
		return rune(s0), 1
	}

	// Determine the sequence length from the lead byte
	var sz int
	if (s0 & 0xE0) == 0xC0 {
		sz = 2
	} else if (s0 & 0xF0) == 0xE0 {
		sz = 3
	} else if (s0 & 0xF8) == 0xF0 {
		sz = 4
	} else {
		return utf8.RuneError, 1
	}
	if n < sz {
		return utf8.RuneError, 0
	}

	s1 := s[1]
	if (s1 & 0xC0) != 0x80 {
		return utf8.RuneError, 1
	}
	if sz == 2 {
		cp := rune(s0&0x1F)<<6 | rune(s1&0x3F)
		if cp < 0x80 {
			// Reject overlong 2-byte encodings
			return utf8.RuneError, 1
		}
		return cp, 2
	}

	s2 := s[2]
	if (s2 & 0xC0) != 0x80 {
		return utf8.RuneError, 1
	}
	if sz == 3 {
		cp := rune(s0&0x0F)<<12 | rune(s1&0x3F)<<6 | rune(s2&0x3F)
		if cp < 0x0800 {
			// Reject overlong 3-byte encodings
			return utf8.RuneError, 1
		}
		return cp, 3
	}

	s3 := s[3]
	if (s3 & 0xC0) != 0x80 {
		return utf8.RuneError, 1
	}
	cp := rune(s0&0x07)<<18 | rune(s1&0x3F)<<12 | rune(s2&0x3F)<<6 | rune(s3&0x3F)
	if cp < 0x010000 || cp > 0x10FFFF {
		// Reject overlong 4-byte encodings and out-of-range code points
		return utf8.RuneError, 1
	}
	return cp, 4
}

================================================
FILE: lib/esbuild/logger/logger.go
================================================
package logger

// Logging is currently designed to look and feel like clang's error format.
// Errors are streamed asynchronously as they happen, each error contains the // contents of the line with the error, and the error count is limited by // default. import ( "fmt" "os" "runtime" "sort" "strings" "sync" "time" "unicode/utf8" ) const defaultTerminalWidth = 80 type Log struct { AddMsg func(Msg) HasErrors func() bool // This is called after the build has finished but before writing to stdout. // It exists to ensure that deferred warning messages end up in the terminal // before the data written to stdout. AlmostDone func() Done func() []Msg Level LogLevel } type LogLevel int8 const ( LevelNone LogLevel = iota LevelVerbose LevelDebug LevelInfo LevelWarning LevelError LevelSilent ) type MsgKind uint8 const ( Error MsgKind = iota Warning Info Note Debug Verbose ) func (kind MsgKind) String() string { switch kind { case Error: return "ERROR" case Warning: return "WARNING" case Info: return "INFO" case Note: return "NOTE" case Debug: return "DEBUG" case Verbose: return "VERBOSE" default: panic("Internal error") } } func (kind MsgKind) Icon() string { // Special-case Windows command prompt, which only supports a few characters if isProbablyWindowsCommandPrompt() { switch kind { case Error: return "X" case Warning: return "▲" case Info: return "►" case Note: return "→" case Debug: return "●" case Verbose: return "♦" default: panic("Internal error") } } switch kind { case Error: return "✘" case Warning: return "▲" case Info: return "▶" case Note: return "→" case Debug: return "●" case Verbose: return "⬥" default: panic("Internal error") } } var windowsCommandPrompt struct { mutex sync.Mutex once bool isProbablyCMD bool } func isProbablyWindowsCommandPrompt() bool { windowsCommandPrompt.mutex.Lock() defer windowsCommandPrompt.mutex.Unlock() if !windowsCommandPrompt.once { windowsCommandPrompt.once = true // Assume we are running in Windows Command Prompt if we're on Windows. If // so, we can't use emoji because it won't be supported. 
Except we can // still use emoji if the WT_SESSION environment variable is present // because that means we're running in the new Windows Terminal instead. if runtime.GOOS == "windows" { windowsCommandPrompt.isProbablyCMD = true for _, env := range os.Environ() { if strings.HasPrefix(env, "WT_SESSION=") { windowsCommandPrompt.isProbablyCMD = false break } } } } return windowsCommandPrompt.isProbablyCMD } type Msg struct { Notes []MsgData PluginName string Data MsgData Kind MsgKind } type MsgData struct { // Optional user-specified data that is passed through unmodified UserDetail interface{} Location *MsgLocation Text string DisableMaximumWidth bool } type MsgLocation struct { File string Namespace string LineText string Suggestion string Line int // 1-based Column int // 0-based, in bytes Length int // in bytes } type Loc struct { // This is the 0-based index of this location from the start of the file, in bytes Start int32 } type Range struct { Loc Loc Len int32 } func (r Range) End() int32 { return r.Loc.Start + r.Len } type Span struct { Text string Range Range } // This type is just so we can use Go's native sort function type SortableMsgs []Msg func (a SortableMsgs) Len() int { return len(a) } func (a SortableMsgs) Swap(i int, j int) { a[i], a[j] = a[j], a[i] } func (a SortableMsgs) Less(i int, j int) bool { ai := a[i] aj := a[j] aiLoc := ai.Data.Location ajLoc := aj.Data.Location if aiLoc == nil || ajLoc == nil { return aiLoc == nil && ajLoc != nil } if aiLoc.File != ajLoc.File { return aiLoc.File < ajLoc.File } if aiLoc.Line != ajLoc.Line { return aiLoc.Line < ajLoc.Line } if aiLoc.Column != ajLoc.Column { return aiLoc.Column < ajLoc.Column } if ai.Kind != aj.Kind { return ai.Kind < aj.Kind } return ai.Data.Text < aj.Data.Text } // This is used to represent both file system paths (Namespace == "file") and // abstract module paths (Namespace != "file"). 
Abstract module paths represent // "virtual modules" when used for an input file and "package paths" when used // to represent an external module. type Path struct { Text string Namespace string // This feature was added to support ancient CSS libraries that append things // like "?#iefix" and "#icons" to some of their import paths as a hack for IE6. // The intent is for these suffix parts to be ignored but passed through to // the output. This is supported by other bundlers, so we also support this. IgnoredSuffix string Flags PathFlags } type PathFlags uint8 const ( // This corresponds to a value of "false' in the "browser" package.json field PathDisabled PathFlags = 1 << iota ) func (p Path) IsDisabled() bool { return (p.Flags & PathDisabled) != 0 } func (a Path) ComesBeforeInSortedOrder(b Path) bool { return a.Namespace > b.Namespace || (a.Namespace == b.Namespace && (a.Text < b.Text || (a.Text == b.Text && (a.Flags < b.Flags || (a.Flags == b.Flags && a.IgnoredSuffix < b.IgnoredSuffix))))) } var noColorResult bool var noColorOnce sync.Once func hasNoColorEnvironmentVariable() bool { noColorOnce.Do(func() { for _, key := range os.Environ() { // Read "NO_COLOR" from the environment. This is a convention that some // software follows. See https://no-color.org/ for more information. if strings.HasPrefix(key, "NO_COLOR=") { noColorResult = true } } }) return noColorResult } // This has a custom implementation instead of using "filepath.Dir/Base/Ext" // because it should work the same on Unix and Windows. These names end up in // the generated output and the generated output should not depend on the OS. 
func PlatformIndependentPathDirBaseExt(path string) (dir string, base string, ext string) { for { i := strings.LastIndexAny(path, "/\\") // Stop if there are no more slashes if i < 0 { base = path break } // Stop if we found a non-trailing slash if i+1 != len(path) { dir, base = path[:i], path[i+1:] break } // Ignore trailing slashes path = path[:i] } // Strip off the extension if dot := strings.LastIndexByte(base, '.'); dot >= 0 { base, ext = base[:dot], base[dot:] } return } type Source struct { // This is used for error messages and the metadata JSON file. // // This is a mostly platform-independent path. It's relative to the current // working directory and always uses standard path separators. Use this for // referencing a file in all output data. These paths still use the original // case of the path so they may still work differently on file systems that // are case-insensitive vs. case-sensitive. PrettyPath string // An identifier that is mixed in to automatically-generated symbol names to // improve readability. For example, if the identifier is "util" then the // symbol for an "export default" statement will be called "util_default". IdentifierName string Contents string // This is used as a unique key to identify this source file. It should never // be shown to the user (e.g. never print this to the terminal). // // If it's marked as an absolute path, it's a platform-dependent path that // includes environment-specific things such as Windows backslash path // separators and potentially the user's home directory. Only use this for // passing to syscalls for reading and writing to the file system. Do not // include this in any output data. // // If it's marked as not an absolute path, it's an opaque string that is used // to refer to an automatically-generated module. 
KeyPath Path Index uint32 } func (s *Source) TextForRange(r Range) string { return s.Contents[r.Loc.Start : r.Loc.Start+r.Len] } func (s *Source) LocBeforeWhitespace(loc Loc) Loc { for loc.Start > 0 { c, width := utf8.DecodeLastRuneInString(s.Contents[:loc.Start]) if c != ' ' && c != '\t' && c != '\r' && c != '\n' { break } loc.Start -= int32(width) } return loc } func (s *Source) RangeOfOperatorBefore(loc Loc, op string) Range { text := s.Contents[:loc.Start] index := strings.LastIndex(text, op) if index >= 0 { return Range{Loc: Loc{Start: int32(index)}, Len: int32(len(op))} } return Range{Loc: loc} } func (s *Source) RangeOfOperatorAfter(loc Loc, op string) Range { text := s.Contents[loc.Start:] index := strings.Index(text, op) if index >= 0 { return Range{Loc: Loc{Start: loc.Start + int32(index)}, Len: int32(len(op))} } return Range{Loc: loc} } func (s *Source) RangeOfString(loc Loc) Range { text := s.Contents[loc.Start:] if len(text) == 0 { return Range{Loc: loc, Len: 0} } quote := text[0] if quote == '"' || quote == '\'' { // Search for the matching quote character for i := 1; i < len(text); i++ { c := text[i] if c == quote { return Range{Loc: loc, Len: int32(i + 1)} } else if c == '\\' { i += 1 } } } return Range{Loc: loc, Len: 0} } func (s *Source) RangeOfNumber(loc Loc) (r Range) { text := s.Contents[loc.Start:] r = Range{Loc: loc, Len: 0} if len(text) > 0 { if c := text[0]; c >= '0' && c <= '9' { r.Len = 1 for int(r.Len) < len(text) { c := text[r.Len] if (c < '0' || c > '9') && (c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && c != '.' 
&& c != '_' { break } r.Len++ } } } return } func (s *Source) RangeOfLegacyOctalEscape(loc Loc) (r Range) { text := s.Contents[loc.Start:] r = Range{Loc: loc, Len: 0} if len(text) >= 2 && text[0] == '\\' { r.Len = 2 for r.Len < 4 && int(r.Len) < len(text) { c := text[r.Len] if c < '0' || c > '9' { break } r.Len++ } } return } func plural(prefix string, count int, shown int, someAreMissing bool) string { var text string if count == 1 { text = fmt.Sprintf("%d %s", count, prefix) } else { text = fmt.Sprintf("%d %ss", count, prefix) } if shown < count { text = fmt.Sprintf("%d of %s", shown, text) } else if someAreMissing && count > 1 { text = "all " + text } return text } func errorAndWarningSummary(errors int, warnings int, shownErrors int, shownWarnings int) string { someAreMissing := shownWarnings < warnings || shownErrors < errors switch { case errors == 0: return plural("warning", warnings, shownWarnings, someAreMissing) case warnings == 0: return plural("error", errors, shownErrors, someAreMissing) default: return fmt.Sprintf("%s and %s", plural("warning", warnings, shownWarnings, someAreMissing), plural("error", errors, shownErrors, someAreMissing)) } } type APIKind uint8 const ( GoAPI APIKind = iota CLIAPI JSAPI ) // This can be used to customize error messages for the current API kind var API APIKind type TerminalInfo struct { IsTTY bool UseColorEscapes bool Width int Height int } func NewStderrLog(options OutputOptions) Log { var mutex sync.Mutex var msgs SortableMsgs terminalInfo := GetTerminalInfo(os.Stderr) errors := 0 warnings := 0 shownErrors := 0 shownWarnings := 0 hasErrors := false remainingMessagesBeforeLimit := options.MessageLimit if remainingMessagesBeforeLimit == 0 { remainingMessagesBeforeLimit = 0x7FFFFFFF } var deferredWarnings []Msg didFinalizeLog := false finalizeLog := func() { if didFinalizeLog { return } didFinalizeLog = true // Print the deferred warning now if there was no error after all for remainingMessagesBeforeLimit > 0 && 
len(deferredWarnings) > 0 { shownWarnings++ writeStringWithColor(os.Stderr, deferredWarnings[0].String(options, terminalInfo)) deferredWarnings = deferredWarnings[1:] remainingMessagesBeforeLimit-- } // Print out a summary if options.MessageLimit > 0 && errors+warnings > options.MessageLimit { writeStringWithColor(os.Stderr, fmt.Sprintf("%s shown (disable the message limit with --log-limit=0)\n", errorAndWarningSummary(errors, warnings, shownErrors, shownWarnings))) } else if options.LogLevel <= LevelInfo && (warnings != 0 || errors != 0) { writeStringWithColor(os.Stderr, fmt.Sprintf("%s\n", errorAndWarningSummary(errors, warnings, shownErrors, shownWarnings))) } } switch options.Color { case ColorNever: terminalInfo.UseColorEscapes = false case ColorAlways: terminalInfo.UseColorEscapes = SupportsColorEscapes } return Log{ Level: options.LogLevel, AddMsg: func(msg Msg) { mutex.Lock() defer mutex.Unlock() msgs = append(msgs, msg) switch msg.Kind { case Verbose: if options.LogLevel <= LevelVerbose { writeStringWithColor(os.Stderr, msg.String(options, terminalInfo)) } case Debug: if options.LogLevel <= LevelDebug { writeStringWithColor(os.Stderr, msg.String(options, terminalInfo)) } case Info: if options.LogLevel <= LevelInfo { writeStringWithColor(os.Stderr, msg.String(options, terminalInfo)) } case Error: hasErrors = true if options.LogLevel <= LevelError { errors++ } case Warning: if options.LogLevel <= LevelWarning { warnings++ } } // Be silent if we're past the limit so we don't flood the terminal if remainingMessagesBeforeLimit == 0 { return } switch msg.Kind { case Error: if options.LogLevel <= LevelError { shownErrors++ writeStringWithColor(os.Stderr, msg.String(options, terminalInfo)) remainingMessagesBeforeLimit-- } case Warning: if options.LogLevel <= LevelWarning { if remainingMessagesBeforeLimit > (options.MessageLimit+1)/2 { shownWarnings++ writeStringWithColor(os.Stderr, msg.String(options, terminalInfo)) remainingMessagesBeforeLimit-- } else { // If we 
have less than half of the slots left, wait for potential // future errors instead of using up all of the slots with warnings. // We want the log for a failed build to always have at least one // error in it. deferredWarnings = append(deferredWarnings, msg) } } } }, HasErrors: func() bool { mutex.Lock() defer mutex.Unlock() return hasErrors }, AlmostDone: func() { mutex.Lock() defer mutex.Unlock() finalizeLog() }, Done: func() []Msg { mutex.Lock() defer mutex.Unlock() finalizeLog() sort.Stable(msgs) return msgs }, } } func PrintErrorToStderr(osArgs []string, text string) { PrintMessageToStderr(osArgs, Msg{Kind: Error, Data: MsgData{Text: text}}) } func OutputOptionsForArgs(osArgs []string) OutputOptions { options := OutputOptions{IncludeSource: true} // Implement a mini argument parser so these options always work even if we // haven't yet gotten to the general-purpose argument parsing code for _, arg := range osArgs { switch arg { case "--color=false": options.Color = ColorNever case "--color=true", "--color": options.Color = ColorAlways case "--log-level=info": options.LogLevel = LevelInfo case "--log-level=warning": options.LogLevel = LevelWarning case "--log-level=error": options.LogLevel = LevelError case "--log-level=silent": options.LogLevel = LevelSilent } } return options } func PrintMessageToStderr(osArgs []string, msg Msg) { log := NewStderrLog(OutputOptionsForArgs(osArgs)) log.AddMsg(msg) log.Done() } type Colors struct { Reset string Bold string Dim string Underline string Red string Green string Blue string Cyan string Magenta string Yellow string RedBgRed string RedBgWhite string GreenBgGreen string GreenBgWhite string BlueBgBlue string BlueBgWhite string CyanBgCyan string CyanBgBlack string MagentaBgMagenta string MagentaBgBlack string YellowBgYellow string YellowBgBlack string } var TerminalColors = Colors{ Reset: "\033[0m", Bold: "\033[1m", Dim: "\033[37m", Underline: "\033[4m", Red: "\033[31m", Green: "\033[32m", Blue: "\033[34m", Cyan: 
"\033[36m", Magenta: "\033[35m", Yellow: "\033[33m", RedBgRed: "\033[41;31m", RedBgWhite: "\033[41;97m", GreenBgGreen: "\033[42;32m", GreenBgWhite: "\033[42;97m", BlueBgBlue: "\033[44;34m", BlueBgWhite: "\033[44;97m", CyanBgCyan: "\033[46;36m", CyanBgBlack: "\033[46;30m", MagentaBgMagenta: "\033[45;35m", MagentaBgBlack: "\033[45;30m", YellowBgYellow: "\033[43;33m", YellowBgBlack: "\033[43;30m", } func PrintText(file *os.File, level LogLevel, osArgs []string, callback func(Colors) string) { options := OutputOptionsForArgs(osArgs) // Skip logging these if these logs are disabled if options.LogLevel > level { return } PrintTextWithColor(file, options.Color, callback) } func PrintTextWithColor(file *os.File, useColor UseColor, callback func(Colors) string) { var useColorEscapes bool switch useColor { case ColorNever: useColorEscapes = false case ColorAlways: useColorEscapes = SupportsColorEscapes case ColorIfTerminal: useColorEscapes = GetTerminalInfo(file).UseColorEscapes } var colors Colors if useColorEscapes { colors = TerminalColors } writeStringWithColor(file, callback(colors)) } type SummaryTableEntry struct { Dir string Base string Size string Bytes int IsSourceMap bool } // This type is just so we can use Go's native sort function type SummaryTable []SummaryTableEntry func (t SummaryTable) Len() int { return len(t) } func (t SummaryTable) Swap(i int, j int) { t[i], t[j] = t[j], t[i] } func (t SummaryTable) Less(i int, j int) bool { ti := t[i] tj := t[j] // Sort source maps last if !ti.IsSourceMap && tj.IsSourceMap { return true } if ti.IsSourceMap && !tj.IsSourceMap { return false } // Sort by size first if ti.Bytes > tj.Bytes { return true } if ti.Bytes < tj.Bytes { return false } // Sort alphabetically by directory first if ti.Dir < tj.Dir { return true } if ti.Dir > tj.Dir { return false } // Then sort alphabetically by file name return ti.Base < tj.Base } // Show a warning icon next to output files that are 1mb or larger const sizeWarningThreshold = 1024 * 
1024 func PrintSummary(useColor UseColor, table SummaryTable, start *time.Time) { PrintTextWithColor(os.Stderr, useColor, func(colors Colors) string { isProbablyWindowsCommandPrompt := isProbablyWindowsCommandPrompt() sb := strings.Builder{} if len(table) > 0 { info := GetTerminalInfo(os.Stderr) // Truncate the table in case it's really long maxLength := info.Height / 2 if info.Height == 0 { maxLength = 20 } else if maxLength < 5 { maxLength = 5 } length := len(table) sort.Sort(table) if length > maxLength { table = table[:maxLength] } // Compute the maximum width of the size column spacingBetweenColumns := 2 hasSizeWarning := false maxPath := 0 maxSize := 0 for _, entry := range table { path := len(entry.Dir) + len(entry.Base) size := len(entry.Size) + spacingBetweenColumns if path > maxPath { maxPath = path } if size > maxSize { maxSize = size } if !entry.IsSourceMap && entry.Bytes >= sizeWarningThreshold { hasSizeWarning = true } } margin := " " layoutWidth := info.Width if layoutWidth < 1 { layoutWidth = defaultTerminalWidth } layoutWidth -= 2 * len(margin) if hasSizeWarning { // Add space for the warning icon layoutWidth -= 2 } if layoutWidth > maxPath+maxSize { layoutWidth = maxPath + maxSize } sb.WriteByte('\n') for _, entry := range table { dir, base := entry.Dir, entry.Base pathWidth := layoutWidth - maxSize // Truncate the path with "..." to fit on one line if len(dir)+len(base) > pathWidth { // Trim the directory from the front, leaving the trailing slash if len(dir) > 0 { n := pathWidth - len(base) - 3 if n < 1 { n = 1 } dir = "..." + dir[len(dir)-n:] } // Trim the file name from the back if len(dir)+len(base) > pathWidth { n := pathWidth - len(dir) - 3 if n < 0 { n = 0 } base = base[:n] + "..." 
} } spacer := layoutWidth - len(entry.Size) - len(dir) - len(base) if spacer < 0 { spacer = 0 } // Put a warning next to the size if it's above a certain threshold sizeColor := colors.Cyan sizeWarning := "" if !entry.IsSourceMap && entry.Bytes >= sizeWarningThreshold { sizeColor = colors.Yellow // Emoji don't work in Windows Command Prompt if !isProbablyWindowsCommandPrompt { sizeWarning = " ⚠️" } } sb.WriteString(fmt.Sprintf("%s%s%s%s%s%s%s%s%s%s%s%s\n", margin, colors.Dim, dir, colors.Reset, colors.Bold, base, colors.Reset, strings.Repeat(" ", spacer), sizeColor, entry.Size, sizeWarning, colors.Reset, )) } // Say how many remaining files are not shown if length > maxLength { plural := "s" if length == maxLength+1 { plural = "" } sb.WriteString(fmt.Sprintf("%s%s...and %d more output file%s...%s\n", margin, colors.Dim, length-maxLength, plural, colors.Reset)) } } sb.WriteByte('\n') lightningSymbol := "⚡ " // Emoji don't work in Windows Command Prompt if isProbablyWindowsCommandPrompt { lightningSymbol = "" } // Printing the time taken is optional if start != nil { sb.WriteString(fmt.Sprintf("%s%sDone in %dms%s\n", lightningSymbol, colors.Green, time.Since(*start).Milliseconds(), colors.Reset, )) } return sb.String() }) } type DeferLogKind uint8 const ( DeferLogAll DeferLogKind = iota DeferLogNoVerboseOrDebug ) func NewDeferLog(kind DeferLogKind) Log { var msgs SortableMsgs var mutex sync.Mutex var hasErrors bool return Log{ Level: LevelInfo, AddMsg: func(msg Msg) { if kind == DeferLogNoVerboseOrDebug && (msg.Kind == Verbose || msg.Kind == Debug) { return } mutex.Lock() defer mutex.Unlock() if msg.Kind == Error { hasErrors = true } msgs = append(msgs, msg) }, HasErrors: func() bool { mutex.Lock() defer mutex.Unlock() return hasErrors }, AlmostDone: func() { }, Done: func() []Msg { mutex.Lock() defer mutex.Unlock() sort.Stable(msgs) return msgs }, } } type UseColor uint8 const ( ColorIfTerminal UseColor = iota ColorNever ColorAlways ) type OutputOptions struct { 
MessageLimit int IncludeSource bool Color UseColor LogLevel LogLevel } func (msg Msg) String(options OutputOptions, terminalInfo TerminalInfo) string { // Format the message text := msgString(options.IncludeSource, terminalInfo, msg.Kind, msg.Data, msg.PluginName) // Format the notes var oldData MsgData for i, note := range msg.Notes { if options.IncludeSource && (i == 0 || strings.IndexByte(oldData.Text, '\n') >= 0 || oldData.Location != nil) { text += "\n" } text += msgString(options.IncludeSource, terminalInfo, Note, note, "") oldData = note } // Add extra spacing between messages if source code is present if options.IncludeSource { text += "\n" } return text } // The number of margin characters in addition to the line number const extraMarginChars = 9 func marginWithLineText(maxMargin int, line int) string { number := fmt.Sprintf("%d", line) return fmt.Sprintf(" %s%s │ ", strings.Repeat(" ", maxMargin-len(number)), number) } func emptyMarginText(maxMargin int, isLast bool) string { space := strings.Repeat(" ", maxMargin) if isLast { return fmt.Sprintf(" %s ╵ ", space) } return fmt.Sprintf(" %s │ ", space) } func msgString(includeSource bool, terminalInfo TerminalInfo, kind MsgKind, data MsgData, pluginName string) string { if !includeSource { if loc := data.Location; loc != nil { return fmt.Sprintf("%s: %s: %s\n", loc.File, kind.String(), data.Text) } return fmt.Sprintf("%s: %s\n", kind.String(), data.Text) } var colors Colors if terminalInfo.UseColorEscapes { colors = TerminalColors } var iconColor string var kindColorBrackets string var kindColorText string location := "" if data.Location != nil { maxMargin := len(fmt.Sprintf("%d", data.Location.Line)) d := detailStruct(data, terminalInfo, maxMargin) if d.Suggestion != "" { location = fmt.Sprintf("\n %s:%d:%d:\n%s%s%s%s%s%s\n%s%s%s%s%s\n%s%s%s%s%s\n%s", d.Path, d.Line, d.Column, colors.Dim, d.SourceBefore, colors.Green, d.SourceMarked, colors.Dim, d.SourceAfter, emptyMarginText(maxMargin, false), d.Indent, 
colors.Green, d.Marker, colors.Dim, emptyMarginText(maxMargin, true), d.Indent, colors.Green, d.Suggestion, colors.Reset, d.ContentAfter, ) } else { location = fmt.Sprintf("\n %s:%d:%d:\n%s%s%s%s%s%s\n%s%s%s%s%s\n%s", d.Path, d.Line, d.Column, colors.Dim, d.SourceBefore, colors.Green, d.SourceMarked, colors.Dim, d.SourceAfter, emptyMarginText(maxMargin, true), d.Indent, colors.Green, d.Marker, colors.Reset, d.ContentAfter, ) } } switch kind { case Verbose: iconColor = colors.Cyan kindColorBrackets = colors.CyanBgCyan kindColorText = colors.CyanBgBlack case Debug: iconColor = colors.Green kindColorBrackets = colors.GreenBgGreen kindColorText = colors.GreenBgWhite case Info: iconColor = colors.Blue kindColorBrackets = colors.BlueBgBlue kindColorText = colors.BlueBgWhite case Error: iconColor = colors.Red kindColorBrackets = colors.RedBgRed kindColorText = colors.RedBgWhite case Warning: iconColor = colors.Yellow kindColorBrackets = colors.YellowBgYellow kindColorText = colors.YellowBgBlack case Note: sb := strings.Builder{} for _, line := range strings.Split(data.Text, "\n") { // Special-case word wrapping if wrapWidth := terminalInfo.Width; wrapWidth > 2 { if !data.DisableMaximumWidth && wrapWidth > 100 { wrapWidth = 100 // Enforce a maximum paragraph width for readability } for _, run := range wrapWordsInString(line, wrapWidth-2) { sb.WriteString(" ") sb.WriteString(linkifyText(run, colors.Underline, colors.Reset)) sb.WriteByte('\n') } continue } // Otherwise, just write an indented line sb.WriteString(" ") sb.WriteString(linkifyText(line, colors.Underline, colors.Reset)) sb.WriteByte('\n') } sb.WriteString(location) return sb.String() } if pluginName != "" { pluginName = fmt.Sprintf("%s%s[plugin %s]%s ", colors.Bold, colors.Magenta, pluginName, colors.Reset) } return fmt.Sprintf("%s%s %s[%s%s%s]%s %s%s%s%s\n%s", iconColor, kind.Icon(), kindColorBrackets, kindColorText, kind.String(), kindColorBrackets, colors.Reset, pluginName, colors.Bold, data.Text, 
colors.Reset, location, ) } func linkifyText(text string, underline string, reset string) string { if underline == "" { return text } https := strings.Index(text, "https://") if https == -1 { return text } sb := strings.Builder{} for { https := strings.Index(text, "https://") if https == -1 { break } end := strings.IndexByte(text[https:], ' ') if end == -1 { end = len(text) } else { end += https } // Remove trailing punctuation if end > https { switch text[end-1] { case '.', ',', '?', '!', ')', ']', '}': end-- } } sb.WriteString(text[:https]) sb.WriteString(underline) sb.WriteString(text[https:end]) sb.WriteString(reset) text = text[end:] } sb.WriteString(text) return sb.String() } func wrapWordsInString(text string, width int) []string { runs := []string{} outer: for text != "" { i := 0 x := 0 wordEndI := 0 // Skip over any leading spaces for i < len(text) && text[i] == ' ' { i++ x++ } // Find out how many words will fit in this run for i < len(text) { oldWordEndI := wordEndI wordStartI := i // Find the end of the word for i < len(text) { c, width := utf8.DecodeRuneInString(text[i:]) if c == ' ' { break } i += width x += 1 // Naively assume that each unicode code point is a single column } wordEndI = i // Split into a new run if this isn't the first word in the run and the end is past the width if wordStartI > 0 && x > width { runs = append(runs, text[:oldWordEndI]) text = text[wordStartI:] continue outer } // Skip over any spaces after the word for i < len(text) && text[i] == ' ' { i++ x++ } } // If we get here, this is the last run (i.e. everything fits) break } // Remove any trailing spaces on the last run for len(text) > 0 && text[len(text)-1] == ' ' { text = text[:len(text)-1] } runs = append(runs, text) return runs } type MsgDetail struct { SourceBefore string SourceMarked string SourceAfter string Indent string Marker string Suggestion string ContentAfter string Path string Line int Column int } // It's not common for large files to have many warnings. 
But when it happens, // we want to make sure that it's not too slow. Source code locations are // represented as byte offsets for compactness but transforming these to // line/column locations for warning messages requires scanning through the // file. A naive approach for this would cause O(n^2) scanning time for n // warnings distributed throughout the file. // // Warnings are typically generated sequentially as the file is scanned. So // one way of optimizing this is to just start scanning from where we left // off last time instead of always starting from the beginning of the file. // That's what this object does. // // Another option could be to eagerly populate an array of line/column offsets // and then use binary search for each query. This might slow down the common // case of a file with only at most a few warnings though, so think before // optimizing too much. Performance in the zero or one warning case is by far // the most important. type LineColumnTracker struct { contents string prettyPath string offset int32 line int32 lineStart int32 lineEnd int32 hasLineStart bool hasLineEnd bool hasSource bool } func MakeLineColumnTracker(source *Source) LineColumnTracker { if source == nil { return LineColumnTracker{ hasSource: false, } } return LineColumnTracker{ contents: source.Contents, prettyPath: source.PrettyPath, hasLineStart: true, hasSource: true, } } func (tracker *LineColumnTracker) MsgData(r Range, text string) MsgData { return MsgData{ Text: text, Location: tracker.MsgLocationOrNil(r), } } func (t *LineColumnTracker) scanTo(offset int32) { contents := t.contents i := t.offset // Scan forward if i < offset { for { r, size := utf8.DecodeRuneInString(contents[i:]) i += int32(size) switch r { case '\n': t.hasLineStart = true t.hasLineEnd = false t.lineStart = i if i == int32(size) || contents[i-int32(size)-1] != '\r' { t.line++ } case '\r', '\u2028', '\u2029': t.hasLineStart = true t.hasLineEnd = false t.lineStart = i t.line++ } if i >= offset { 
t.offset = i return } } } // Scan backward if i > offset { for { r, size := utf8.DecodeLastRuneInString(contents[:i]) i -= int32(size) switch r { case '\n': t.hasLineStart = false t.hasLineEnd = true t.lineEnd = i if i == 0 || contents[i-1] != '\r' { t.line-- } case '\r', '\u2028', '\u2029': t.hasLineStart = false t.hasLineEnd = true t.lineEnd = i t.line-- } if i <= offset { t.offset = i return } } } } func (t *LineColumnTracker) computeLineAndColumn(offset int) (lineCount int, columnCount int, lineStart int, lineEnd int) { t.scanTo(int32(offset)) // Scan for the start of the line if !t.hasLineStart { contents := t.contents i := t.offset for i > 0 { r, size := utf8.DecodeLastRuneInString(contents[:i]) if r == '\n' || r == '\r' || r == '\u2028' || r == '\u2029' { break } i -= int32(size) } t.hasLineStart = true t.lineStart = i } // Scan for the end of the line if !t.hasLineEnd { contents := t.contents i := t.offset n := int32(len(contents)) for i < n { r, size := utf8.DecodeRuneInString(contents[i:]) if r == '\n' || r == '\r' || r == '\u2028' || r == '\u2029' { break } i += int32(size) } t.hasLineEnd = true t.lineEnd = i } return int(t.line), offset - int(t.lineStart), int(t.lineStart), int(t.lineEnd) } func (tracker *LineColumnTracker) MsgLocationOrNil(r Range) *MsgLocation { if tracker == nil || !tracker.hasSource { return nil } // Convert the index into a line and column number lineCount, columnCount, lineStart, lineEnd := tracker.computeLineAndColumn(int(r.Loc.Start)) return &MsgLocation{ File: tracker.prettyPath, Line: lineCount + 1, // 0-based to 1-based Column: columnCount, Length: int(r.Len), LineText: tracker.contents[lineStart:lineEnd], } } func detailStruct(data MsgData, terminalInfo TerminalInfo, maxMargin int) MsgDetail { // Only highlight the first line of the line text loc := *data.Location endOfFirstLine := len(loc.LineText) for i, c := range loc.LineText { if c == '\r' || c == '\n' || c == '\u2028' || c == '\u2029' { endOfFirstLine = i break } } 
firstLine := loc.LineText[:endOfFirstLine] afterFirstLine := loc.LineText[endOfFirstLine:] if afterFirstLine != "" && !strings.HasSuffix(afterFirstLine, "\n") { afterFirstLine += "\n" } // Clamp values in range if loc.Line < 0 { loc.Line = 0 } if loc.Column < 0 { loc.Column = 0 } if loc.Length < 0 { loc.Length = 0 } if loc.Column > endOfFirstLine { loc.Column = endOfFirstLine } if loc.Length > endOfFirstLine-loc.Column { loc.Length = endOfFirstLine - loc.Column } spacesPerTab := 2 lineText := renderTabStops(firstLine, spacesPerTab) textUpToLoc := renderTabStops(firstLine[:loc.Column], spacesPerTab) markerStart := len(textUpToLoc) markerEnd := markerStart indent := strings.Repeat(" ", estimateWidthInTerminal(textUpToLoc)) marker := "^" // Extend markers to cover the full range of the error if loc.Length > 0 { markerEnd = len(renderTabStops(firstLine[:loc.Column+loc.Length], spacesPerTab)) } // Clip the marker to the bounds of the line if markerStart > len(lineText) { markerStart = len(lineText) } if markerEnd > len(lineText) { markerEnd = len(lineText) } if markerEnd < markerStart { markerEnd = markerStart } // Trim the line to fit the terminal width width := terminalInfo.Width if width < 1 { width = defaultTerminalWidth } width -= maxMargin + extraMarginChars if width < 1 { width = 1 } if loc.Column == endOfFirstLine { // If the marker is at the very end of the line, the marker will be a "^" // character that extends one column past the end of the line. In this case // we should reserve a column at the end so the marker doesn't wrap. 
width -= 1 } if len(lineText) > width { // Try to center the error sliceStart := (markerStart + markerEnd - width) / 2 if sliceStart > markerStart-width/5 { sliceStart = markerStart - width/5 } if sliceStart < 0 { sliceStart = 0 } if sliceStart > len(lineText)-width { sliceStart = len(lineText) - width } sliceEnd := sliceStart + width // Slice the line slicedLine := lineText[sliceStart:sliceEnd] markerStart -= sliceStart markerEnd -= sliceStart if markerStart < 0 { markerStart = 0 } if markerEnd > len(slicedLine) { markerEnd = len(slicedLine) } // Truncate the ends with "..." if len(slicedLine) > 3 && sliceStart > 0 { slicedLine = "..." + slicedLine[3:] if markerStart < 3 { markerStart = 3 } } if len(slicedLine) > 3 && sliceEnd < len(lineText) { slicedLine = slicedLine[:len(slicedLine)-3] + "..." if markerEnd > len(slicedLine)-3 { markerEnd = len(slicedLine) - 3 } if markerEnd < markerStart { markerEnd = markerStart } } // Now we can compute the indent lineText = slicedLine indent = strings.Repeat(" ", estimateWidthInTerminal(lineText[:markerStart])) } // If marker is still multi-character after clipping, make the marker wider if markerEnd-markerStart > 1 { marker = strings.Repeat("~", estimateWidthInTerminal(lineText[markerStart:markerEnd])) } // Put a margin before the marker indent margin := marginWithLineText(maxMargin, loc.Line) return MsgDetail{ Path: loc.File, Line: loc.Line, Column: loc.Column, SourceBefore: margin + lineText[:markerStart], SourceMarked: lineText[markerStart:markerEnd], SourceAfter: lineText[markerEnd:], Indent: indent, Marker: marker, Suggestion: loc.Suggestion, ContentAfter: afterFirstLine, } } // Estimate the number of columns this string will take when printed func estimateWidthInTerminal(text string) int { // For now just assume each code point is one column. This is wrong but is // less wrong than assuming each code unit is one column. 
width := 0 for text != "" { c, size := utf8.DecodeRuneInString(text) text = text[size:] // Ignore the Zero Width No-Break Space character (UTF-8 BOM) if c != 0xFEFF { width++ } } return width } func renderTabStops(withTabs string, spacesPerTab int) string { if !strings.ContainsRune(withTabs, '\t') { return withTabs } withoutTabs := strings.Builder{} count := 0 for _, c := range withTabs { if c == '\t' { spaces := spacesPerTab - count%spacesPerTab for i := 0; i < spaces; i++ { withoutTabs.WriteRune(' ') count++ } } else { withoutTabs.WriteRune(c) count++ } } return withoutTabs.String() } func (log Log) Add(kind MsgKind, tracker *LineColumnTracker, r Range, text string) { log.AddMsg(Msg{ Kind: kind, Data: tracker.MsgData(r, text), }) } func (log Log) AddWithNotes(kind MsgKind, tracker *LineColumnTracker, r Range, text string, notes []MsgData) { log.AddMsg(Msg{ Kind: kind, Data: tracker.MsgData(r, text), Notes: notes, }) } ================================================ FILE: lib/esbuild/logger/logger_darwin.go ================================================ //go:build darwin // +build darwin package logger import ( "os" "golang.org/x/sys/unix" ) const SupportsColorEscapes = true func GetTerminalInfo(file *os.File) (info TerminalInfo) { fd := file.Fd() // Is this file descriptor a terminal? 
if _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA); err == nil { info.IsTTY = true info.UseColorEscapes = !hasNoColorEnvironmentVariable() // Get the width of the window if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil { info.Width = int(w.Col) info.Height = int(w.Row) } } return } func writeStringWithColor(file *os.File, text string) { file.WriteString(text) } ================================================ FILE: lib/esbuild/logger/logger_linux.go ================================================ //go:build linux // +build linux package logger import ( "os" "golang.org/x/sys/unix" ) const SupportsColorEscapes = true func GetTerminalInfo(file *os.File) (info TerminalInfo) { fd := file.Fd() // Is this file descriptor a terminal? if _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS); err == nil { info.IsTTY = true info.UseColorEscapes = !hasNoColorEnvironmentVariable() // Get the width of the window if w, err := unix.IoctlGetWinsize(int(fd), unix.TIOCGWINSZ); err == nil { info.Width = int(w.Col) info.Height = int(w.Row) } } return } func writeStringWithColor(file *os.File, text string) { file.WriteString(text) } ================================================ FILE: lib/esbuild/logger/logger_other.go ================================================ //go:build !darwin && !linux && !windows // +build !darwin,!linux,!windows package logger import "os" const SupportsColorEscapes = false func GetTerminalInfo(*os.File) TerminalInfo { return TerminalInfo{} } func writeStringWithColor(file *os.File, text string) { file.WriteString(text) } ================================================ FILE: lib/esbuild/logger/logger_windows.go ================================================ //go:build windows // +build windows package logger import ( "os" "strings" "syscall" "unsafe" ) const SupportsColorEscapes = true var kernel32 = syscall.NewLazyDLL("kernel32.dll") var getConsoleMode = kernel32.NewProc("GetConsoleMode") var setConsoleTextAttribute = 
kernel32.NewProc("SetConsoleTextAttribute") var getConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") type consoleScreenBufferInfo struct { dwSizeX int16 dwSizeY int16 dwCursorPositionX int16 dwCursorPositionY int16 wAttributes uint16 srWindowLeft int16 srWindowTop int16 srWindowRight int16 srWindowBottom int16 dwMaximumWindowSizeX int16 dwMaximumWindowSizeY int16 } func GetTerminalInfo(file *os.File) TerminalInfo { fd := file.Fd() // Is this file descriptor a terminal? var unused uint32 isTTY, _, _ := syscall.Syscall(getConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&unused)), 0) // Get the width of the window var info consoleScreenBufferInfo syscall.Syscall(getConsoleScreenBufferInfo.Addr(), 2, fd, uintptr(unsafe.Pointer(&info)), 0) return TerminalInfo{ IsTTY: isTTY != 0, Width: int(info.dwSizeX) - 1, Height: int(info.dwSizeY) - 1, UseColorEscapes: !hasNoColorEnvironmentVariable(), } } const ( FOREGROUND_BLUE uint8 = 1 << iota FOREGROUND_GREEN FOREGROUND_RED FOREGROUND_INTENSITY BACKGROUND_BLUE BACKGROUND_GREEN BACKGROUND_RED BACKGROUND_INTENSITY ) var windowsEscapeSequenceMap = map[string]uint8{ TerminalColors.Reset: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE, TerminalColors.Dim: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE, TerminalColors.Bold: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY, // Apparently underlines only work with the CJK locale on Windows :( TerminalColors.Underline: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE, TerminalColors.Red: FOREGROUND_RED, TerminalColors.Green: FOREGROUND_GREEN, TerminalColors.Blue: FOREGROUND_BLUE, TerminalColors.Cyan: FOREGROUND_GREEN | FOREGROUND_BLUE, TerminalColors.Magenta: FOREGROUND_RED | FOREGROUND_BLUE, TerminalColors.Yellow: FOREGROUND_RED | FOREGROUND_GREEN, TerminalColors.RedBgRed: FOREGROUND_RED | BACKGROUND_RED, TerminalColors.RedBgWhite: FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_RED, 
	TerminalColors.GreenBgGreen:     FOREGROUND_GREEN | BACKGROUND_GREEN,
	TerminalColors.GreenBgWhite:     FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN,
	TerminalColors.BlueBgBlue:       FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.BlueBgWhite:      FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_BLUE,
	TerminalColors.CyanBgCyan:       FOREGROUND_GREEN | FOREGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.CyanBgBlack:      BACKGROUND_GREEN | BACKGROUND_BLUE,
	TerminalColors.MagentaBgMagenta: FOREGROUND_RED | FOREGROUND_BLUE | BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.MagentaBgBlack:   BACKGROUND_RED | BACKGROUND_BLUE,
	TerminalColors.YellowBgYellow:   FOREGROUND_RED | FOREGROUND_GREEN | BACKGROUND_RED | BACKGROUND_GREEN,
	TerminalColors.YellowBgBlack:    BACKGROUND_RED | BACKGROUND_GREEN,
}

// writeStringWithColor scans text for ANSI escape sequences and replaces each
// recognized one with a SetConsoleTextAttribute call, writing the surrounding
// plain text directly. Unrecognized sequences are written through unchanged.
func writeStringWithColor(file *os.File, text string) {
	fd := file.Fd()
	i := 0

	for i < len(text) {
		// Find the escape (033 is the octal code for the ESC byte)
		if text[i] != 033 {
			i++
			continue
		}

		// Find the 'm' (only look at a short window; real sequences are short)
		window := text[i:]
		if len(window) > 8 {
			window = window[:8]
		}
		m := strings.IndexByte(window, 'm')
		if m == -1 {
			i++
			continue
		}
		m += i + 1

		// Find the escape sequence
		attributes, ok := windowsEscapeSequenceMap[text[i:m]]
		if !ok {
			i++
			continue
		}

		// Write out the text before the escape sequence
		file.WriteString(text[:i])

		// Apply the escape sequence
		text = text[m:]
		i = 0
		setConsoleTextAttribute.Call(fd, uintptr(attributes))
	}

	// Write out the remaining text
	file.WriteString(text)
}

================================================
FILE: lib/esbuild/sourcemap/sourcemap.go
================================================
package sourcemap

import (
	"bytes"
	"unicode/utf8"

	"github.com/withastro/compiler/lib/esbuild/helpers"
	"github.com/withastro/compiler/lib/esbuild/logger"
)

// Mapping is one decoded source map segment linking a generated position to
// an original position.
type Mapping struct {
	GeneratedLine   int32 // 0-based
	GeneratedColumn int32 // 0-based count of UTF-16 code units
	SourceIndex     int32 // 0-based
	OriginalLine    int32 // 0-based
	OriginalColumn  int32 // 0-based count of UTF-16 code
units }

type SourceMap struct {
	Sources        []string
	SourcesContent []SourceContent
	Mappings       []Mapping
}

type SourceContent struct {
	// This stores both the unquoted and the quoted values. We try to use the
	// already-quoted value if possible so we don't need to re-quote it
	// unnecessarily for maximum performance.
	Quoted string

	// But sometimes we need to re-quote the value, such as when it contains
	// non-ASCII characters and we are in ASCII-only mode. In that case we quote
	// this parsed UTF-16 value.
	Value []uint16
}

// Find returns the last mapping on the given generated line whose generated
// column is <= column, or nil if that line has no such mapping.
func (sm *SourceMap) Find(line int32, column int32) *Mapping {
	mappings := sm.Mappings

	// Binary search
	count := len(mappings)
	index := 0
	for count > 0 {
		step := count / 2
		i := index + step
		mapping := mappings[i]
		if mapping.GeneratedLine < line || (mapping.GeneratedLine == line && mapping.GeneratedColumn <= column) {
			index = i + 1
			count -= step + 1
		} else {
			count = step
		}
	}

	// Handle search failure
	if index > 0 {
		mapping := &mappings[index-1]

		// Match the behavior of the popular "source-map" library from Mozilla
		if mapping.GeneratedLine == line {
			return mapping
		}
	}
	return nil
}

// base64 is the alphabet used by VLQ encoding in the source map spec.
var base64 = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")

// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the continuation
// bit. The continuation bit tells us whether there are more digits in this
// value following this digit.
//
//   Continuation
//   |    Sign
//   |    |
//   V    V
//   101011
//
// EncodeVLQ encodes value as a base64 VLQ byte sequence.
func EncodeVLQ(value int) []byte {
	var vlq int
	if value < 0 {
		vlq = ((-value) << 1) | 1
	} else {
		vlq = value << 1
	}

	// Handle the common case up front without allocations
	if (vlq >> 5) == 0 {
		digit := vlq & 31
		return base64[digit : digit+1]
	}

	encoded := []byte{}
	for {
		digit := vlq & 31
		vlq >>= 5

		// If there are still more digits in this value, we must make sure the
		// continuation bit is marked
		if vlq != 0 {
			digit |= 32
		}
		encoded = append(encoded, base64[digit])
		if vlq == 0 {
			break
		}
	}
	return encoded
}

// DecodeVLQ decodes one VLQ value from encoded starting at start, returning
// the value and the index just past the last byte consumed.
func DecodeVLQ(encoded []byte, start int) (int, int) {
	shift := 0
	vlq := 0

	// Scan over the input
	for {
		index := bytes.IndexByte(base64, encoded[start])
		if index < 0 {
			break
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		start++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	value := vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, start
}

// DecodeVLQUTF16 is DecodeVLQ over UTF-16 code units; the final bool reports
// whether a complete value was decoded.
func DecodeVLQUTF16(encoded []uint16) (int, int, bool) {
	n := len(encoded)
	if n == 0 {
		return 0, 0, false
	}

	// Scan over the input
	current := 0
	shift := 0
	vlq := 0
	for {
		if current >= n {
			return 0, 0, false
		}
		index := bytes.IndexByte(base64, byte(encoded[current]))
		if index < 0 {
			return 0, 0, false
		}

		// Decode a single byte
		vlq |= (index & 31) << shift
		current++
		shift += 5

		// Stop if there's no continuation bit
		if (index & 32) == 0 {
			break
		}
	}

	// Recover the value
	var value = vlq >> 1
	if (vlq & 1) != 0 {
		value = -value
	}
	return value, current, true
}

// LineColumnOffset is a position delta measured in lines and UTF-16 columns.
type LineColumnOffset struct {
	Lines   int
	Columns int
}

func (a LineColumnOffset) ComesBefore(b LineColumnOffset) bool {
	return a.Lines < b.Lines || (a.Lines == b.Lines && a.Columns < b.Columns)
}

func (a *LineColumnOffset) Add(b LineColumnOffset) {
	if b.Lines == 0 {
		a.Columns += b.Columns
	} else {
		a.Lines += b.Lines
		a.Columns = b.Columns
	}
}

// AdvanceBytes advances the offset across the given UTF-8 bytes.
func (offset *LineColumnOffset) AdvanceBytes(bytes []byte) {
	columns := offset.Columns
	for len(bytes) > 0 {
		c, width
:= utf8.DecodeRune(bytes)
		bytes = bytes[width:]
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && len(bytes) > 0 && bytes[0] == '\n' {
				columns++
				continue
			}
			offset.Lines++
			columns = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}

// AdvanceString advances the offset across the given string.
func (offset *LineColumnOffset) AdvanceString(text string) {
	columns := offset.Columns
	for i, c := range text {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(text) && text[i+1] == '\n' {
				columns++
				continue
			}
			offset.Lines++
			columns = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				columns++
			} else {
				columns += 2
			}
		}
	}
	offset.Columns = columns
}

// SourceMapPieces is a source map split into prefix, VLQ mappings, and suffix.
type SourceMapPieces struct {
	Prefix   []byte
	Mappings []byte
	Suffix   []byte
}

func (pieces SourceMapPieces) HasContent() bool {
	return len(pieces.Prefix)+len(pieces.Mappings)+len(pieces.Suffix) > 0
}

// SourceMapShift records a column shift: generated positions at or after
// Before are moved to After.
type SourceMapShift struct {
	Before LineColumnOffset
	After  LineColumnOffset
}

// Finalize applies the given shifts to the mappings and joins the pieces into
// a complete source map.
func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte {
	// An optimized path for when there are no shifts
	if len(shifts) == 1 {
		bytes := pieces.Prefix
		minCap := len(bytes) + len(pieces.Mappings) + len(pieces.Suffix)
		if cap(bytes) < minCap {
			bytes = append(make([]byte, 0, minCap), bytes...)
		}
		bytes = append(bytes, pieces.Mappings...)
		bytes = append(bytes, pieces.Suffix...)
		return bytes
	}

	startOfRun := 0
	current := 0
	generated := LineColumnOffset{}
	prevShiftColumnDelta := 0
	j := helpers.Joiner{}

	// Start the source map
	j.AddBytes(pieces.Prefix)

	// This assumes that a) all mappings are valid and b) all mappings are ordered
	// by increasing generated position. This should be the case for all mappings
	// generated by esbuild, which should be the only mappings we process here.
	for current < len(pieces.Mappings) {
		// Handle a line break
		if pieces.Mappings[current] == ';' {
			generated.Lines++
			generated.Columns = 0
			prevShiftColumnDelta = 0
			current++
			continue
		}

		potentialEndOfRun := current

		// Read the generated column
		generatedColumnDelta, next := DecodeVLQ(pieces.Mappings, current)
		generated.Columns += generatedColumnDelta
		current = next

		potentialStartOfRun := current

		// Skip over the original position information
		_, current = DecodeVLQ(pieces.Mappings, current) // The original source
		_, current = DecodeVLQ(pieces.Mappings, current) // The original line
		_, current = DecodeVLQ(pieces.Mappings, current) // The original column

		// Skip a trailing comma
		if current < len(pieces.Mappings) && pieces.Mappings[current] == ',' {
			current++
		}

		// Detect crossing shift boundaries
		didCrossBoundary := false
		for len(shifts) > 1 && shifts[1].Before.ComesBefore(generated) {
			shifts = shifts[1:]
			didCrossBoundary = true
		}
		if !didCrossBoundary {
			continue
		}

		// This shift isn't relevant if the next mapping after this shift is on a
		// following line. In that case, don't split and keep scanning instead.
		shift := shifts[0]
		if shift.After.Lines != generated.Lines {
			continue
		}

		// Add all previous mappings in a single run for efficiency. Since source
		// mappings are relative, no data needs to be modified inside this run.
		j.AddBytes(pieces.Mappings[startOfRun:potentialEndOfRun])

		// Then modify the first mapping across the shift boundary with the updated
		// generated column value. It's simplest to only support column shifts. This
		// is reasonable because import paths should not contain newlines.
		if shift.Before.Lines != shift.After.Lines {
			panic("Unexpected line change when shifting source maps")
		}
		shiftColumnDelta := shift.After.Columns - shift.Before.Columns
		j.AddBytes(EncodeVLQ(generatedColumnDelta + shiftColumnDelta - prevShiftColumnDelta))
		prevShiftColumnDelta = shiftColumnDelta

		// Finally, start the next run after the end of this generated column offset
		startOfRun = potentialStartOfRun
	}

	// Finish the source map
	j.AddBytes(pieces.Mappings[startOfRun:])
	j.AddBytes(pieces.Suffix)
	return j.Done()
}

// Coordinates in source maps are stored using relative offsets for size
// reasons. When joining together chunks of a source map that were emitted
// in parallel for different parts of a file, we need to fix up the first
// segment of each chunk to be relative to the end of the previous chunk.
type SourceMapState struct {
	// This isn't stored in the source map. It's only used by the bundler to join
	// source map chunks together correctly.
	GeneratedLine int

	// These are stored in the source map in VLQ format.
	GeneratedColumn int
	SourceIndex     int
	OriginalLine    int
	OriginalColumn  int
}

// Source map chunks are computed in parallel for speed. Each chunk is relative
// to the zero state instead of being relative to the end state of the previous
// chunk, since it's impossible to know the end state of the previous chunk in
// a parallel computation.
//
// After all chunks are computed, they are joined together in a second pass.
// This rewrites the first mapping in each chunk to be relative to the end
// state of the previous chunk.
func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startState SourceMapState, sourceMap []byte) {
	// Handle line breaks in between this mapping and the previous one
	if startState.GeneratedLine != 0 {
		j.AddBytes(bytes.Repeat([]byte{';'}, startState.GeneratedLine))
		prevEndState.GeneratedColumn = 0
	}

	// Skip past any leading semicolons, which indicate line breaks
	semicolons := 0
	for sourceMap[semicolons] == ';' {
		semicolons++
	}
	if semicolons > 0 {
		j.AddBytes(sourceMap[:semicolons])
		sourceMap = sourceMap[semicolons:]
		prevEndState.GeneratedColumn = 0
		startState.GeneratedColumn = 0
	}

	// Strip off the first mapping from the buffer. The first mapping should be
	// for the start of the original file (the printer always generates one for
	// the start of the file).
	generatedColumn, i := DecodeVLQ(sourceMap, 0)
	sourceIndex, i := DecodeVLQ(sourceMap, i)
	originalLine, i := DecodeVLQ(sourceMap, i)
	originalColumn, i := DecodeVLQ(sourceMap, i)
	sourceMap = sourceMap[i:]

	// Rewrite the first mapping to be relative to the end state of the previous
	// chunk. We now know what the end state is because we're in the second pass
	// where all chunks have already been generated.
	startState.SourceIndex += sourceIndex
	startState.GeneratedColumn += generatedColumn
	startState.OriginalLine += originalLine
	startState.OriginalColumn += originalColumn
	j.AddBytes(appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState))

	// Then append everything after that without modification.
	j.AddBytes(sourceMap)
}

// appendMappingToBuffer appends one delta-encoded VLQ mapping segment to
// buffer, inserting a separating comma when needed.
func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) []byte {
	// Put commas in between mappings
	if lastByte != 0 && lastByte != ';' && lastByte != '"' {
		buffer = append(buffer, ',')
	}

	// Record the generated column (the line is recorded using ';' elsewhere)
	buffer = append(buffer, EncodeVLQ(currentState.GeneratedColumn-prevState.GeneratedColumn)...)
	prevState.GeneratedColumn = currentState.GeneratedColumn

	// Record the generated source
	buffer = append(buffer, EncodeVLQ(currentState.SourceIndex-prevState.SourceIndex)...)
	prevState.SourceIndex = currentState.SourceIndex

	// Record the original line
	buffer = append(buffer, EncodeVLQ(currentState.OriginalLine-prevState.OriginalLine)...)
	prevState.OriginalLine = currentState.OriginalLine

	// Record the original column
	buffer = append(buffer, EncodeVLQ(currentState.OriginalColumn-prevState.OriginalColumn)...)
	prevState.OriginalColumn = currentState.OriginalColumn

	return buffer
}

type LineOffsetTable struct {
	// The source map specification is very loose and does not specify what
	// column numbers actually mean. The popular "source-map" library from Mozilla
	// appears to interpret them as counts of UTF-16 code units, so we generate
	// those too for compatibility.
	//
	// We keep mapping tables around to accelerate conversion from byte offsets
	// to UTF-16 code unit counts. However, this mapping takes up a lot of memory
	// and generates a lot of garbage. Since most JavaScript is ASCII and the
	// mapping for ASCII is 1:1, we avoid creating a table for ASCII-only lines
	// as an optimization.
	columnsForNonASCII        []int32
	byteOffsetToFirstNonASCII int32
	byteOffsetToStartOfLine   int32
}

// GenerateLineOffsetTables builds one LineOffsetTable per line of contents,
// used to convert byte offsets into UTF-16 column counts.
func GenerateLineOffsetTables(contents string, approximateLineCount int32) []LineOffsetTable {
	var ColumnsForNonASCII []int32
	ByteOffsetToFirstNonASCII := int32(0)
	lineByteOffset := 0
	columnByteOffset := 0
	column := int32(0)

	// Preallocate the top-level table using the approximate line count from the lexer
	lineOffsetTables := make([]LineOffsetTable, 0, approximateLineCount)

	for i, c := range contents {
		// Mark the start of the next line
		if column == 0 {
			lineByteOffset = i
		}

		// Start the mapping if this character is non-ASCII
		if c > 0x7F && ColumnsForNonASCII == nil {
			columnByteOffset = i - lineByteOffset
			ByteOffsetToFirstNonASCII = int32(columnByteOffset)
			ColumnsForNonASCII = []int32{}
		}

		// Update the per-byte column offsets
		if ColumnsForNonASCII != nil {
			for lineBytesSoFar := i - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
				ColumnsForNonASCII = append(ColumnsForNonASCII, column)
			}
		}

		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' && i+1 < len(contents) && contents[i+1] == '\n' {
				column++
				continue
			}

			lineOffsetTables = append(lineOffsetTables, LineOffsetTable{
				byteOffsetToStartOfLine:   int32(lineByteOffset),
				byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
				columnsForNonASCII:        ColumnsForNonASCII,
			})
			columnByteOffset = 0
			ByteOffsetToFirstNonASCII = 0
			ColumnsForNonASCII = nil
			column = 0

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				column++
			} else {
				column += 2
			}
		}
	}

	// Mark the start of the next line
	if column == 0 {
		lineByteOffset = len(contents)
	}

	// Do one last update for the column at the end of the file
	if ColumnsForNonASCII != nil {
		for lineBytesSoFar := len(contents) - lineByteOffset; columnByteOffset <= lineBytesSoFar; columnByteOffset++ {
			ColumnsForNonASCII = append(ColumnsForNonASCII, column)
		}
	}

	lineOffsetTables =
append(lineOffsetTables, LineOffsetTable{
		byteOffsetToStartOfLine:   int32(lineByteOffset),
		byteOffsetToFirstNonASCII: ByteOffsetToFirstNonASCII,
		columnsForNonASCII:        ColumnsForNonASCII,
	})
	return lineOffsetTables
}

// Chunk is one independently-generated piece of a source map.
type Chunk struct {
	Buffer []byte

	// This end state will be used to rewrite the start of the following source
	// map chunk so that the delta-encoded VLQ numbers are preserved.
	EndState SourceMapState

	// There probably isn't a source mapping at the end of the file (nor should
	// there be) but if we're appending another source map chunk after this one,
	// we'll need to know how many characters were in the last line we generated.
	FinalGeneratedColumn int

	ShouldIgnore bool
}

// ChunkBuilder incrementally accumulates the VLQ mappings for one chunk.
type ChunkBuilder struct {
	inputSourceMap      *SourceMap
	sourceMap           []byte
	lineOffsetTables    []LineOffsetTable
	prevState           SourceMapState
	lastGeneratedUpdate int
	generatedColumn     int
	prevLoc             logger.Loc
	hasPrevState        bool

	// This is a workaround for a bug in the popular "source-map" library:
	// https://github.com/mozilla/source-map/issues/261. The library will
	// sometimes return null when querying a source map unless every line
	// starts with a mapping at column zero.
	//
	// The workaround is to replicate the previous mapping if a line ends
	// up not starting with a mapping. This is done lazily because we want
	// to avoid replicating the previous mapping if we don't need to.
	lineStartsWithMapping     bool
	coverLinesWithoutMappings bool
}

func MakeChunkBuilder(inputSourceMap *SourceMap, lineOffsetTables []LineOffsetTable) ChunkBuilder {
	return ChunkBuilder{
		inputSourceMap:   inputSourceMap,
		prevLoc:          logger.Loc{Start: -1},
		lineOffsetTables: lineOffsetTables,

		// We automatically repeat the previous source mapping if we ever generate
		// a line that doesn't start with a mapping. This helps give files more
		// complete mapping coverage without gaps.
		//
		// However, we probably shouldn't do this if the input file has a nested
		// source map that we will be remapping through.
We have no idea what state
		// that source map is in and it could be pretty scrambled.
		//
		// I've seen cases where blindly repeating the last mapping for subsequent
		// lines gives very strange and unhelpful results with source maps from
		// other tools.
		coverLinesWithoutMappings: inputSourceMap == nil,
	}
}

// AddSourceMapping records a mapping from the original source location loc to
// the current end of the generated output.
func (b *ChunkBuilder) AddSourceMapping(loc logger.Loc, output []byte) {
	if loc == b.prevLoc {
		return
	}
	b.prevLoc = loc

	// Binary search to find the line
	lineOffsetTables := b.lineOffsetTables
	count := len(lineOffsetTables)
	originalLine := 0
	for count > 0 {
		step := count / 2
		i := originalLine + step
		if lineOffsetTables[i].byteOffsetToStartOfLine <= loc.Start {
			originalLine = i + 1
			count = count - step - 1
		} else {
			count = step
		}
	}
	originalLine--

	// Use the line to compute the column
	line := &lineOffsetTables[originalLine]
	originalColumn := int(loc.Start - line.byteOffsetToStartOfLine)
	if line.columnsForNonASCII != nil && originalColumn >= int(line.byteOffsetToFirstNonASCII) {
		originalColumn = int(line.columnsForNonASCII[originalColumn-int(line.byteOffsetToFirstNonASCII)])
	}

	b.updateGeneratedLineAndColumn(output)

	// If this line doesn't start with a mapping and we're about to add a mapping
	// that's not at the start, insert a mapping first so the line starts with one.
	if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.generatedColumn > 0 && b.hasPrevState {
		b.appendMappingWithoutRemapping(SourceMapState{
			GeneratedLine:   b.prevState.GeneratedLine,
			GeneratedColumn: 0,
			SourceIndex:     b.prevState.SourceIndex,
			OriginalLine:    b.prevState.OriginalLine,
			OriginalColumn:  b.prevState.OriginalColumn,
		})
	}

	b.appendMapping(SourceMapState{
		GeneratedLine:   b.prevState.GeneratedLine,
		GeneratedColumn: b.generatedColumn,
		OriginalLine:    originalLine,
		OriginalColumn:  originalColumn,
	})

	// This line now has a mapping on it, so don't insert another one
	b.lineStartsWithMapping = true
}

// GenerateChunk flushes column tracking for the remaining output and returns
// the built chunk. A chunk containing only ';' line separators is ignorable.
func (b *ChunkBuilder) GenerateChunk(output []byte) Chunk {
	b.updateGeneratedLineAndColumn(output)
	shouldIgnore := true
	for _, c := range b.sourceMap {
		if c != ';' {
			shouldIgnore = false
			break
		}
	}
	return Chunk{
		Buffer:               b.sourceMap,
		EndState:             b.prevState,
		FinalGeneratedColumn: b.generatedColumn,
		ShouldIgnore:         shouldIgnore,
	}
}

// Scan over the printed text since the last source mapping and update the
// generated line and column numbers
func (b *ChunkBuilder) updateGeneratedLineAndColumn(output []byte) {
	for i, c := range string(output[b.lastGeneratedUpdate:]) {
		switch c {
		case '\r', '\n', '\u2028', '\u2029':
			// Handle Windows-specific "\r\n" newlines
			if c == '\r' {
				newlineCheck := b.lastGeneratedUpdate + i + 1
				if newlineCheck < len(output) && output[newlineCheck] == '\n' {
					continue
				}
			}

			// If we're about to move to the next line and the previous line didn't have
			// any mappings, add a mapping at the start of the previous line.
			if b.coverLinesWithoutMappings && !b.lineStartsWithMapping && b.hasPrevState {
				b.appendMappingWithoutRemapping(SourceMapState{
					GeneratedLine:   b.prevState.GeneratedLine,
					GeneratedColumn: 0,
					SourceIndex:     b.prevState.SourceIndex,
					OriginalLine:    b.prevState.OriginalLine,
					OriginalColumn:  b.prevState.OriginalColumn,
				})
			}

			b.prevState.GeneratedLine++
			b.prevState.GeneratedColumn = 0
			b.generatedColumn = 0
			b.sourceMap = append(b.sourceMap, ';')

			// This new line doesn't have a mapping yet
			b.lineStartsWithMapping = false

		default:
			// Mozilla's "source-map" library counts columns using UTF-16 code units
			if c <= 0xFFFF {
				b.generatedColumn++
			} else {
				b.generatedColumn += 2
			}
		}
	}
	b.lastGeneratedUpdate = len(output)
}

// appendMapping remaps currentState through the input source map (if any),
// then appends it to the chunk.
func (b *ChunkBuilder) appendMapping(currentState SourceMapState) {
	// If the input file had a source map, map all the way back to the original
	if b.inputSourceMap != nil {
		mapping := b.inputSourceMap.Find(
			int32(currentState.OriginalLine),
			int32(currentState.OriginalColumn))

		// Some locations won't have a mapping
		if mapping == nil {
			return
		}

		currentState.SourceIndex = int(mapping.SourceIndex)
		currentState.OriginalLine = int(mapping.OriginalLine)
		currentState.OriginalColumn = int(mapping.OriginalColumn)
	}

	b.appendMappingWithoutRemapping(currentState)
}

func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState) {
	var lastByte byte
	if len(b.sourceMap) != 0 {
		lastByte = b.sourceMap[len(b.sourceMap)-1]
	}
	b.sourceMap = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState)
	b.prevState = currentState
	b.hasPrevState = true
}

================================================
FILE: lib/esbuild/test/diff.go
================================================
package test

import (
	"fmt"
	"strings"

	"github.com/withastro/compiler/lib/esbuild/logger"
)

// diff returns a line-by-line diff of old vs new, optionally colorized.
func diff(old string, new string, color bool) string {
	return strings.Join(diffRec(nil, strings.Split(old, "\n"), strings.Split(new, "\n"), color), "\n")
}

// This is a simple recursive line-by-line
diff implementation
func diffRec(result []string, old []string, new []string, color bool) []string {
	o, n, common := lcSubstr(old, new)
	if common == 0 {
		// Everything changed
		for _, line := range old {
			if color {
				result = append(result, fmt.Sprintf("%s-%s%s", logger.TerminalColors.Red, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, "-"+line)
			}
		}
		for _, line := range new {
			if color {
				result = append(result, fmt.Sprintf("%s+%s%s", logger.TerminalColors.Green, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, "+"+line)
			}
		}
	} else {
		// Something in the middle stayed the same
		result = diffRec(result, old[:o], new[:n], color)
		for _, line := range old[o : o+common] {
			if color {
				result = append(result, fmt.Sprintf("%s %s%s", logger.TerminalColors.Dim, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, " "+line)
			}
		}
		result = diffRec(result, old[o+common:], new[n+common:], color)
	}
	return result
}

// From: https://en.wikipedia.org/wiki/Longest_common_substring_problem
func lcSubstr(S []string, T []string) (int, int, int) {
	r := len(S)
	n := len(T)
	Lprev := make([]int, n)
	Lnext := make([]int, n)
	z := 0
	retI := 0
	retJ := 0
	for i := 0; i < r; i++ {
		for j := 0; j < n; j++ {
			if S[i] == T[j] {
				if j == 0 {
					Lnext[j] = 1
				} else {
					Lnext[j] = Lprev[j-1] + 1
				}
				if Lnext[j] > z {
					z = Lnext[j]
					retI = i + 1
					retJ = j + 1
				}
			} else {
				Lnext[j] = 0
			}
		}
		Lprev, Lnext = Lnext, Lprev
	}
	return retI - z, retJ - z, z
}

================================================
FILE: lib/esbuild/test/util.go
================================================
package test

import (
	"fmt"
	"testing"

	"github.com/withastro/compiler/lib/esbuild/logger"
)

// AssertEqual fails the test immediately if observed != expected.
func AssertEqual(t *testing.T, observed interface{}, expected interface{}) {
	t.Helper()
	if observed != expected {
		t.Fatalf("%s != %s", observed, expected)
	}
}

// AssertEqualWithDiff is AssertEqual but prints a colorized diff on failure.
func AssertEqualWithDiff(t *testing.T, observed interface{}, expected interface{}) {
	t.Helper()
	if observed != expected {
		stringA := fmt.Sprintf("%v",
observed)
		stringB := fmt.Sprintf("%v", expected)
		color := true
		t.Fatal(diff(stringB, stringA, color))
	}
}

// SourceForTest wraps contents in a logger.Source suitable for unit tests.
func SourceForTest(contents string) logger.Source {
	return logger.Source{
		Index:          0,
		KeyPath:        logger.Path{Text: "<stdin>"},
		PrettyPath:     "<stdin>",
		Contents:       contents,
		IdentifierName: "stdin",
	}
}

================================================
FILE: package.json
================================================
{
  "name": "root",
  "version": "0.0.0",
  "private": true,
  "repository": {
    "type": "git",
    "url": "https://github.com/withastro/compiler.git"
  },
  "scripts": {
    "build": "make wasm",
    "build:compiler": "pnpm --filter @astrojs/compiler run build",
    "build:all": "pnpm run build && pnpm run build:compiler",
    "check": "biome check",
    "ci": "biome ci --diagnostic-level=warn",
    "check:write": "biome check --write",
    "prerelease": "pnpm run build:compiler",
    "release": "changeset publish",
    "test": "tsx node_modules/uvu/bin.js packages test -i utils -i stress",
    "test:only": "tsx node_modules/uvu/bin.js packages",
    "test:stress": "tsx packages/compiler/test/stress/index.ts",
    "test:ci": "pnpm run test && pnpm run test:stress"
  },
  "packageManager": "pnpm@10.22.0",
  "workspaces": [
    "packages/*"
  ],
  "devDependencies": {
    "@biomejs/biome": "1.8.1",
    "@changesets/cli": "^2.25.0",
    "sass": "^1.55.0",
    "tsx": "^4.16.2",
    "typescript": "~5.5.3",
    "uvu": "^0.5.6"
  },
  "engines": {
    "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
  }
}

================================================
FILE: packages/compiler/.gitignore
================================================
dist
wasm

================================================
FILE: packages/compiler/CHANGELOG.md
================================================
# @astrojs/compiler

## 3.0.1

### Patch Changes

- 05ef961: Fix scoped CSS nesting so descendant selectors without `&` inside nested rules are not incorrectly re-scoped.
## 3.0.0 ### Major Changes - c05e16e: Removes the first argument of `$$result.createAstro()` `$$result.createAstro()` does not accept an `AstroGlobalPartial` as the first argument anymore: ```diff -const Astro = $$result.createAstro($$Astro, $$props, $$slots); +const Astro = $$result.createAstro($$props, $$slots); ``` - c05e16e: Removes `renderScript` from `TransformOptions`. It is now the default and only behavior - c05e16e: Removes `experimentalScriptOrder` from `TransformOptions`. It is now the default and only behavior ### Patch Changes - 811e90f: Fixes an issue where `server:defer` was treated like a transition directive, causing ViewTransitions CSS to be included even when no `transition:*` directives were used. - 755f046: Fixes a CSS scoping regression where selectors using the nesting selector (`&`) with pseudo-classes or pseudo-elements (e.g. `&:last-of-type`, `&::before`) inside `:global()` contexts would incorrectly receive a duplicate scope attribute. - f89451a: Fixed an issue where explicit `<html>` and `<head>` tags were removed from output when a JSX comment appeared between DOCTYPE and the `<html>` tag. - 8275bdd: Fixes a bug where trailing whitespaces were preserved before `<style>` tags after transformation, in certain cases. Now trailing whitespaces are correctly removed. - 56ef0ca: Fixes TSX output to transform top-level returns into throws in order to avoid downstream TypeScript parsing issues - e329d20: Fix slot attribute stripped inside expression - 02de370: fixed a bug where the Astro compiler incorrectly handled the 'as' property name in Props interfaces. This allows Astro components to use 'as' as a prop name (common pattern for polymorphic components) without breaking TypeScript type inference. The Props type is now correctly preserved when destructuring objects with an 'as' property. - 615eb21: Fix CSS nesting so nested selectors without an ampersand are parsed and scoped correctly. 
## 3.0.0-beta.1 ### Patch Changes - 755f046: Fixes a CSS scoping regression where selectors using the nesting selector (`&`) with pseudo-classes or pseudo-elements (e.g. `&:last-of-type`, `&::before`) inside `:global()` contexts would incorrectly receive a duplicate scope attribute. - f89451a: Fixed an issue where explicit `<html>` and `<head>` tags were removed from output when a JSX comment appeared between DOCTYPE and the `<html>` tag. - 8275bdd: Fixes a bug where trailing whitespaces were preserved before `<style>` tags after transformation, in certain cases. Now trailing whitespaces are correctly removed. - e329d20: Fix slot attribute stripped inside expression - 615eb21: Fix CSS nesting so nested selectors without an ampersand are parsed and scoped correctly. ## 3.0.0-beta.0 ### Major Changes - c05e16e: Removes the first argument of `$$result.createAstro()` `$$result.createAstro()` does not accept an `AstroGlobalPartial` as the first argument anymore: ```diff -const Astro = $$result.createAstro($$Astro, $$props, $$slots); +const Astro = $$result.createAstro($$props, $$slots); ``` - c05e16e: Removes `renderScript` from `TransformOptions`. It is now the default and only behavior - c05e16e: Removes `experimentalScriptOrder` from `TransformOptions`. It is now the default and only behavior ## 2.13.1 ### Patch Changes - 357b8fe: Fixes a panic when parsing files with a closing frontmatter fence (---) but no opening fence. The compiler now returns a helpful diagnostic error instead of crashing. - cba568f: Fixes the "Unterminated string literal" error when using multiline attribute values on components. ## 2.13.0 ### Minor Changes - 59f77593: Support HTML <selectedcontent> element Based on the recent commit history, this change appears to be related to fixing issue #1093 regarding selectedcontent parsing in customizable selects. 
The `<selectedcontent>` element is part of the new Customizable Select Element API in HTML, used within `<selectlist>` elements to display the currently selected option(s).

- 89c80fee: Adds a `walkAsync` utility function that returns a Promise from the tree traversal process. Unlike the existing `walk` function which doesn't provide a way to wait for traversal completion, `walkAsync` allows consumers to `await` the full traversal of the AST.

### Patch Changes

- 2a27aca7: Fixes a potential parsing issue with head content defined in a component where another component is rendered first.
- 1264286c: Fixes a CSS scoping issue when a selector contains only pseudo selectors.

## 2.12.2

### Patch Changes

- 950635e: Reverts a change where view transitions were made async to accommodate the CSP requirements.

## 2.12.1

### Patch Changes

- 138c07f: Improves detection of function body opening curly brace for exported functions.
- 4a967ab: Fixes a bug where view transition names got lost after update to Astro 5.9

## 2.12.0

### Minor Changes

- e428ae0: Add head propagation metadata to server islands

## 2.11.0

### Minor Changes

- 0399d55: Add an experimental flag `experimentalScriptOrder` that corrects the order styles & scripts are rendered within a component. When enabled, the order styles & scripts are rendered will be consistent with the order they are defined.

### Patch Changes

- c758d7e: Add async properly when await used inside fragment

## 2.10.4

### Patch Changes

- 8cae811: Fixes an issue with the conditional rendering of scripts.

  **This change updates a v5.0 breaking change when `experimental.directRenderScript` became the default script handling behavior.**

  If you have already successfully upgraded to Astro v5, you may need to review your script tags again and make sure they still behave as desired after this release.
[See the v5 Upgrade Guide for more details.](https://docs.astro.build/en/guides/upgrade-to/v5/#script-tags-are-rendered-directly-as-declared) - 970f085: Fixes an issue when parsing elements inside foreign content (e.g. SVG), when they were inside an expression - 6b6a134: Fixes a bug caused by having an extra space in the fragment tag in the TSX output ## 2.10.3 ### Patch Changes - 5d0023d: Fixes sourcemapping for CRLF line endings wrongfully including the last character - f55a2af: Resolves an issue where the `class:list` directive was not correctly merging with the class attribute. ## 2.10.2 ### Patch Changes - f05a7cc: Adjust TSX output to return ranges using UTF-16 code units, as it would in JavaScript ## 2.10.1 ### Patch Changes - 21b7b95: Revert the transformation of top-level returns into throws in TSX as it was buggy in numerous situations - af471f5: Fixes positions for extracted tags being wrong when using IncludeStyles and IncludeScripts set to false ## 2.10.0 ### Minor Changes - 1d684b1: Adds detected language to extracted style tags in TSX ### Patch Changes - 7fa6577: Transform top level returns into throws in the TSX output ## 2.9.2 ### Patch Changes - a765f47: Escape script tags with unknown types ## 2.9.1 ### Patch Changes - 9549bb7: Fixes style and script tags sometimes being forcefully put into the body / head tags in the AST ## 2.9.0 ### Minor Changes - 3e25858: Adds two new options to `convertToTSX`: `includeScripts` and `includeStyles`. These options allow you to optionally remove scripts and styles from the output TSX file. Additionally this PR makes it so scripts and styles metadata are now included in the `metaRanges` property of the result of `convertToTSX`. This is notably useful in order to extract scripts and styles from the output TSX file into separate files for language servers. - 9fb8d5d: Adds `serverComponents` metadata This adds a change necessary to support server islands. 
During transformation the compiler discovers `server:defer` directives and appends them to the `serverComponents` array. This is exported along with the other metadata so that it can be used inside of Astro. ## 2.8.2 ### Patch Changes - 6b7c12f: Avoids stringifying `undefined` in scoped class attributes - 8803da6: Fixes newlines in opening tag generating buggy code in TSX ## 2.8.1 ### Patch Changes - 0bb2746: Allow `data-astro-reload` to take a value ## 2.8.0 ### Minor Changes - 17f8932: The WASM binaries for the compiler are now built using Go 1.22. ### Patch Changes - e8b6cdf: Skips printing `createAstro` code if the `Astro` global is not referenced - ecd7e90: Skips printing `async` for component functions if `await` is not used ## 2.7.1 ### Patch Changes - 5467f40: Fix issue with head content being pushed into body - d587ca6: Adds warnings indicating that the `data-astro-rerun` attribute can not be used on an external module `<script>` and that `data-astro-reload` is only supported on `<a>`, `<area>` and `<form>` elements. ## 2.7.0 ### Minor Changes - 50fc0a9: Implement the `transition:persist-props` transformation ### Patch Changes - f45dbfd: Updates deprecated Node.js 16 github actions. ## 2.6.0 ### Minor Changes - a90d99e: Adds a new `renderScript` option to render non-inline script tags using a `renderScript` function from `internalURL`, instead of stripping the script entirely ### Patch Changes - 6ffa54b: Fix TSX output prefixing output with unnecessary jsdoc comment - 86221d6: Adds a lint rule to display a message when attributes are added to a script tag, explaining that the script will be treated as `is:inline`. 
## 2.5.3 ### Patch Changes - c17734d: Rolls back the dynamic slot generation feature to rework it ## 2.5.2 ### Patch Changes - 418558c: Fixes an issue where a slotted element in an expression would cause subsequent ones to be incorrectly printed - db93975: Fixes an issue where an expression inside a `th` tag would cause an infinite loop ## 2.5.1 ### Patch Changes - d071b0b: Fixes an issue which caused the hydration script of default exported components to fail loading in some cases. ## 2.5.0 ### Minor Changes - db13db9: - Adds support for dynamic slots inside loops - Fixes an issue where successive named slotted elements would cause a runtime error - Fixes an issue in which if there was an implicit default slotted element next to a named one, the former would get swallowed by the latter. ## 2.4.2 ### Patch Changes - 9938bc1: Fixes a sourcemap-related crash when using multibyte characters ## 2.4.1 ### Patch Changes - 7a07089: Fixes a bug where expressions starting with whitespace, followed by anything else, weren't printed correctly. ## 2.4.0 ### Minor Changes - 9ff6342: Return generated frontmatter and body ranges in TSX output ### Patch Changes - b52f7d1: Fixes an issue where unterminated quoted attributes caused the compiler to crash - 24e2886: Fixes a regression that caused whitespace between elements in an expression to result in invalid code - c5bcbd0: Prefix TSX output with a JSX pragma to ensure proper types are used - 4f74c05: Fixes an issue where HTML and JSX comments led to subsequent content being incorrectly treated as plain text when they have parent expressions. - cad2606: Fixes an issue where components with template literal attributes were printed with the name of the attribute as value. 
- 14ccba5: Fixes an issue where a `tr` element which contained an expression would cause its parent table to swallow any trailing element inside said table - f9373f2: Fixes an issue where Astro fragments used inside a `table` element would cause lots of missing pieces of markup - 4de359b: Preserve whitespace in expressions - fe2f0c8: Fixes an issue where `/` or `*/` would cause prematurely closed comments in the tsx output ## 2.3.4 ### Patch Changes - 56e1959: Fixes a memory reference error when an expression is the final node in a file ## 2.3.3 ### Patch Changes - 5b450df: Fixed an `index out of range` error when multibyte characters were rendered as markup - 852fc1b: Fix `index out of range [0]` error when there is a component before the `<html>` tag - 05ecaff: Fixes an issue where when there are nested expressions, subsequent content was incorrectly treated as plain text in some cases. - 8c0cffb: Fixes an issue causing `index out of range` errors when handling some multibyte characters like `\u2028`. ## 2.3.2 ### Patch Changes - 2bdb4bb: Revert table related parsing change as it resulted in a regression ## 2.3.1 ### Patch Changes - e241f2d: Fix generated code for expressions within `td` elements - 5ce5cc6: Fix compact collapse for empty text nodes between elements ## 2.3.0 ### Minor Changes - 0c24ea1: Add a new `annotateSourceFile` option. This option makes it so the compiler will annotate every element with its source file location. This is notably useful for dev tools to be able to provide features like a "Open in editor" button. This option is disabled by default. ```html <div> <span>hello world</span> </div> ``` Results in: ```html <div data-astro-source-file="/Users/erika/Projects/..." data-astro-source-loc="1:1" > <span data-astro-source-file="/Users/erika/Projects/..." data-astro-source-loc="2:2" >hello world</span > </div> ``` In Astro, this option is enabled only in development mode. 
## 2.2.2 ### Patch Changes - bf76663: [TSX] Add `ASTRO__MergeUnion` util to allow destructuring from automatically inferred union Prop types ## 2.2.1 ### Patch Changes - a52c181: Fixed an issue where spread attributes could not include double quotation marks. ## 2.2.0 ### Minor Changes - 7579d7c: Support CSS `@starting-style` rule (From: https://github.com/evanw/esbuild/pull/3249) - 09abfe4: Adds ability for TSX output to automatically infer `Astro.props` and `Astro.params` when `getStaticPaths` is used ## 2.1.0 ### Minor Changes - 2584348: Add propagation metadata to the TransformResult ## 2.0.1 ### Patch Changes - 4e1e907: Remove experimental flags from `transition:` directives. They are now enabled by default. ## 2.0.0 ### Major Changes - cd93272: The scope hash created by the compiler is now **lowercase**. This aligns with the HTML spec of the attribute names, where they are lowercase by spec. This change is needed because the compiler now creates data attributes that contain the hash in their name. ## 1.8.2 ### Patch Changes - 80b7e42: Pass the type of the current component as a type argument to the AstroGlobal in order to type Astro.self ## 1.8.1 ### Patch Changes - 52fe144: Change the value of the generated attribute ## 1.8.0 ### Minor Changes - 365710c: Support the transition:persist directive ## 1.7.0 ### Minor Changes - 5c19809: Add a `scopedStyleStrategy` called `"attribute"`. The strategy will print styles using data attributes. ## 1.6.3 ### Patch Changes - 6b4873d: Pass transition directives onto components ## 1.6.2 ### Patch Changes - ce5cf31: Pass transition:animate expressions ## 1.6.1 ### Patch Changes - 486614b: Fixes use of expression in transition:name ## 1.6.0 ### Minor Changes - 2906df2: Support for view transition directives This adds support for `transition:animate` and `transition:name` which get passed into the new `renderTransition` runtime function. 
## 1.5.7 ### Patch Changes - 34fcf01: [TSX] escape additional invalid characters - 5fe952d: [TSX] fix sourcemaps for quoted attributes that span multiple lines ## 1.5.6 ### Patch Changes - 3d69f4e: [TSX] maintain trailing whitespace before an element is closed, fixing TypeScript completion in some cases ## 1.5.5 ### Patch Changes - 101c18e: [AST] Include end position for frontmatter node when it is the only item in the file - 35ccd5e: [AST] add raw attribute values to AST - 325d3c3: [TSX] fix compiler crash when file only contains an unnamed fragment ## 1.5.4 ### Patch Changes - a35468a: Do not remove surrounding whitespace from text surrounded by newlines when `compressHTML` is enabled - 4aba173: Fix props detection when importing `Props` from another file (see [#814](https://github.com/withastro/compiler/issues/814)) ## 1.5.3 ### Patch Changes - 5a2ce3e: Update compiler output for `style` objects when used with `define:vars` ## 1.5.2 ### Patch Changes - 73a98c2: Fix `compressHTML` edge case when both leading and trailing whitespace is present ## 1.5.1 ### Patch Changes - a51227d: Move `declare const` for Props type to the bottom of the file to make mapping easier downstream ## 1.5.0 ### Minor Changes - 4255b03: Export package as dual CJS / ESM ### Patch Changes - ae67f1b: Apply `define:vars` to non-root elements ## 1.4.2 ### Patch Changes - e104c1c: Polyfill the entire crypto object if node >= v16.17.0 - 6f7b2f6: Fix crash when transforming files with Windows line endings ## 1.4.1 ### Patch Changes - 0803e86: Handle crashes when using `parse` and `convertToTSX` by restarting the service ## 1.4.0 ### Minor Changes - fc0f470: Implements the scopedStyleStrategy option ## 1.3.2 ### Patch Changes - 19c0176: Fix TSX sourcemapping for components using Windows-style line returns - b0e0cfd: Add a sync entrypoint ## 1.3.1 ### Patch Changes - e0baa85: Preserve whitespace in slots ## 1.3.0 ### Minor Changes - 95a6610: Expose the `convertToTSX` function in the compiler browser 
bundle - 6d168dd: Add ContainsHead flag for metadata ## 1.2.2 ### Patch Changes - a8a845f: Fix regression related to self-closing tags ## 1.2.1 ### Patch Changes - 348840b: Fix getStaticPaths export when used with a TypeScript type ([#4929](https://github.com/withastro/astro/issues/4929)) - 8ed067e: Fix parse error for multiline `export type` using Unions or Intersections - 6354e50: Improve handling of self-closing tags returned from expression - 5a5f91d: Fix `define:vars` when used with a `style` attribute - b637e9a: Fix ignored `form` elements after a `form` element that contains an expression - 2658ed4: Correctly apply style when `class` and `class:list` are both used ## 1.2.0 ### Minor Changes - b2cfd00: Add teardown API to remove WASM instance after using the compiler ## 1.1.2 ### Patch Changes - 2de6128: Preserve namespaced attributes when using expressions - af13f2d: Fix incorrect `convertToTSX` types. The function accepts `filename`, not `sourcefile`. - 5eb4fff: Compile `set:html` and `set:text` quoted and template literal attributes as strings ## 1.1.1 ### Patch Changes - 6765f01: Fix attributes starting with : not being properly transformed in the TSX output ## 1.1.0 ### Minor Changes - a75824d: Allow passing through result to slot call ## 1.0.2 ### Patch Changes - 0c27f3f: Collapse multiple trailing text nodes if present ## 1.0.1 ### Patch Changes - 94b2c02: Prevent insertion of maybeRenderHead on hoisted scripts ## 1.0.0 ### Major Changes - 8e86bc6: The Astro compiler is officially stable! This release is entirely ceremonial, the code is the same as [`@astrojs/compiler@0.33.0`](https://github.com/withastro/compiler/releases/tag/%40astrojs%2Fcompiler%400.33.0) ## 0.33.0 ### Minor Changes - 1adac72: Improve error recovery when using the `transform` function. The compiler will now properly reject the promise with a useful message and stacktrace rather than print internal errors to stdout. 
### Patch Changes - 68d3c0c: Fix edge case where `export type` could hang the compiler - ec1ddf0: Handle edge case with TypeScript generics handling and our TSX output - 23d1fc0: Ignore trailing whitespace in components ## 0.32.0 ### Minor Changes - 2404848: Remove `pathname` option in favour of `sourcefile` option - 2ca86f6: Remove `site` and `projectRoot` options in favour of the `astroGlobalArgs` option - edd3e0e: Merge `sourcefile` and `moduleId` options as a single `filename` option. Add a new `normalizedFilename` option to generate stable hashes instead. - 08843bd: Remove `experimentalStaticExtraction` option. It is now the default. ## 0.31.4 ### Patch Changes - 960b853: Rename `SerializeOtions` interface to `SerializeOptions` - fcab891: Fixes export hoisting edge case - 47de01a: Handle module IDs containing quotes ## 0.31.3 ### Patch Changes - fd5cb57: Rollback https://github.com/withastro/compiler/pull/674 ## 0.31.2 ### Patch Changes - 89c0cee: fix: corner case that component in head expression will case body tag missing - 20497f4: Improve fidelity of sourcemaps for frontmatter ## 0.31.1 ### Patch Changes - 24dcf7e: Allow `script` and `style` before HTML - ef391fa: fix: corner case with slot expression in head will cause body tag missing ## 0.31.0 ### Minor Changes - abdddeb: Update Go to 1.19 ## 0.30.1 ### Patch Changes - ff9e7ba: Fix edge case where `<` was not handled properly inside of expressions - f31d535: Fix edge case with Prop detection for TSX output ## 0.30.0 ### Minor Changes - 963aaab: Provide the moduleId of the astro component ## 0.29.19 ### Patch Changes - 3365233: Replace internal tokenizer state logs with proper warnings ## 0.29.18 ### Patch Changes - 80de395: fix: avoid nil pointer dereference in table parsing - aa3ad9d: Fix `parse` output to properly account for the location of self-closing tags - b89dec4: Internally, replace `astro.ParseFragment` in favor of `astro.ParseFragmentWithOptions`. 
We now check whether an error handler is passed when calling `astro.ParseFragmentWithOptions` ## 0.29.17 ### Patch Changes - 1e7e098: Add warning for invalid spread attributes - 3cc6f55: Fix handling of unterminated template literal attributes - 48c5677: Update default `internalURL` to `astro/runtime/server/index.js` - 2893f33: Fix a number of `table` and `expression` related bugs ## 0.29.16 ### Patch Changes - ec745f4: Self-closing tags will now retrieve "end" positional data - a6c2822: Fix a few TSX output errors ## 0.29.15 ### Patch Changes - 5f6e69b: Fix expression literal handling ## 0.29.14 ### Patch Changes - 6ff1d80: Fix regression introduced by https://github.com/withastro/compiler/pull/617 ## 0.29.13 ### Patch Changes - 8f3e488: Fix regression introduced to `parse` handling in the last patch ## 0.29.12 ### Patch Changes - a41982a: Fix expression edge cases, improve literal parsing ## 0.29.11 ### Patch Changes - ee907f1: Fix #5308, duplicate style bug when using `define:vars` ## 0.29.10 ### Patch Changes - 07a65df: Print `\r` when printing TSX output - 1250d0b: Add warning when `define:vars` won't work because of compilation limitations ## 0.29.9 ### Patch Changes - 1fe92c0: Fix TSX sourcemaps on Windows (take 4) ## 0.29.8 ### Patch Changes - 01b62ea: Fix sourcemap bug on Windows (again x2) ## 0.29.7 ### Patch Changes - 108c6c9: Fix TSX sourcemap bug on Windows (again) ## 0.29.6 ### Patch Changes - 4b3fafa: Fix TSX sourcemaps on Windows ## 0.29.5 ### Patch Changes - 73a2b69: Use an IIFE for define:vars scripts ## 0.29.4 ### Patch Changes - 4381efa: Return proper diagnostic code for warnings ## 0.29.3 ### Patch Changes - 85e1d31: AST: move `start` position of elements to the first index of their opening tag ## 0.29.2 ### Patch Changes - 035829b: AST: move end position of elements to the last index of their end tag ## 0.29.1 ### Patch Changes - a99c014: Ensure comment and text nodes have end positions when generating an AST from `parse` ## 0.29.0 ### Minor 
Changes - fd2fc28: Fix some utf8 compatibility issues ### Patch Changes - 4b68670: TSX: fix edge case with spread attribute printing - 6b204bd: Fix bug with trailing `style` tags being moved into the `html` element - 66fe230: Fix: include element end location in `parse` AST ## 0.28.1 ### Patch Changes - aac8c89: Fix end tag sourcemappings for TSX mode - d7f3288: TSX: Improve self-closing tag behavior and mappings - 75dd7cc: Fix spread attribute mappings ## 0.28.0 ### Minor Changes - 5da0dc2: Add `resolvePath` option to control hydration path resolution - e816a61: Remove metadata export if `resolvePath` option provided ## 0.27.2 ### Patch Changes - 959f96b: Fix "missing sourcemap" issue - 94f6f3e: Fix edge case with multi-line comment usage - 85a654a: Fix `parse` causing a compiler panic when a component with a client directive was imported but didn't have a matching import - 5e32cbe: Improvements to TSX output ## 0.27.1 ### Patch Changes - cc9f174: fixed regression caused by #546 ## 0.27.0 ### Minor Changes - c770e7b: The compiler will now return `diagnostics` and unique error codes to be handled by the consumer. 
For example: ```js import type { DiagnosticSeverity, DiagnosticCode, } from "@astrojs/compiler/types"; import { transform } from "@astrojs/compiler"; async function run() { const { diagnostics } = await transform(file, opts); function log(severity: DiagnosticSeverity, message: string) { switch (severity) { case DiagnosticSeverity.Error: return console.error(message); case DiagnosticSeverity.Warning: return console.warn(message); case DiagnosticSeverity.Information: return console.info(message); case DiagnosticSeverity.Hint: return console.info(message); } } for (const diagnostic of diagnostics) { let message = diagnostic.text; if (diagnostic.hint) { message += `\n\n[hint] ${diagnostic.hint}`; } // Or customize messages for a known DiagnosticCode if (diagnostic.code === DiagnosticCode.ERROR_UNMATCHED_IMPORT) { message = `My custom message about an unmatched import!`; } log(diagnostic.severity, message); } } ``` ### Patch Changes - 0b24c24: Implement automatic typing for Astro.props in the TSX output ## 0.26.1 ### Patch Changes - 920898c: Handle edge case with `noscript` tags - 8ee78a6: handle slots that contains the head element - 244e43e: Do not hoist import inside object - b8cd954: Fix edge case with line comments and export hoisting - 52ebfb7: Fix parse/tsx output to gracefully handle invalid HTML (style outside of body, etc) - 884efc6: Fix edge case with multi-line export hoisting ## 0.26.0 ### Minor Changes - 0be58ab: Improve sourcemap support for TSX output ### Patch Changes - e065e29: Prevent head injection from removing script siblings ## 0.25.2 ### Patch Changes - 3a51b8e: Ensure that head injection occurs if there is only a hoisted script ## 0.25.1 ### Patch Changes - 41fae67: Do not scope empty style blocks - 1ab8280: fix(#517): fix edge case with TypeScript transform - a3678f9: Fix import.meta.env usage above normal imports ## 0.25.0 ### Minor Changes - 6446ea3: Make Astro styles being printed after user imports ### Patch Changes - 51bc60f: Fix edge 
cases with `getStaticPaths` where valid JS syntax was improperly handled ## 0.24.0 ### Minor Changes - 6ebcb4f: Allow preprocessStyle to return an error ### Patch Changes - abda605: Include filename when calculating scope ## 0.23.5 ### Patch Changes - 6bc8e0b: Prevent import assertion from being scanned too soon ## 0.23.4 ### Patch Changes - 3b9f0d2: Remove css print escape for experimentalStaticExtraction ## 0.23.3 ### Patch Changes - 7693d76: Fix resolution of .jsx modules ## 0.23.2 ### Patch Changes - 167ad21: Improve handling of namespaced components when they are multiple levels deep - 9283258: Fix quotations in pre-quoted attributes - 76fcef3: Better handling for imports which use special characters ## 0.23.1 ### Patch Changes - 79376f3: Fix regression with expression rendering ## 0.23.0 ### Minor Changes - d8448e2: Prevent printing the doctype in the JS output ### Patch Changes - a28c3d8: Fix handling of unbalanced quotes in expression attributes - 28d1d4d: Fix handling of TS generics inside of expressions - 356d3b6: Prevent wrapping module scripts with scope ## 0.22.1 ### Patch Changes - 973103c: Prevents unescaping attribute expressions ## 0.22.0 ### Minor Changes - 558c9dd: Generate a stable scoped class that does _NOT_ factor in local styles. This will allow us to safely do style HMR without needing to update the DOM as well. - c19cd8c: Update Astro's CSS scoping algorithm to implement zero-specificity scoping, according to [RFC0012](https://github.com/withastro/rfcs/blob/main/proposals/0012-scoped-css-with-preserved-specificity.md). 
## 0.21.0 ### Minor Changes - 8960d82: New handling for `define:vars` scripts and styles ### Patch Changes - 4b318d5: Do not attempt to hoist styles or scripts inside of `<noscript>` - d6ebab6: Fixing missing semicolon on TSX Frontmatter last-entries ## 0.20.0 ### Minor Changes - 48d33ff: Removes compiler special casing for the Markdown component - 4a5352e: Removes limitation where imports/exports must be at the top of an `.astro` file. Fixes various edge cases around `getStaticPaths` hoisting. ### Patch Changes - 245d73e: Add support for HTML minification by passing `compact: true` to `transform`. - 3ecdd24: Update TSX output to also generate TSX-compatible code for attributes containing dots ## 0.19.0 ### Minor Changes - fcb4834: Removes fallback for the site configuration ### Patch Changes - 02add77: Fixes many edge cases around tables when used with components, slots, or expressions - b23dd4d: Fix handling of unmatched close brace in template literals - 9457a91: Fix issue with `{` in template literal attributes - c792161: Fix nested expression handling with a proper expression tokenizer stack ## 0.18.2 ### Patch Changes - f8547a7: Revert [#448](https://github.com/withastro/compiler/pull/448) for now ## 0.18.1 ### Patch Changes - aff2f23: Warning on client: usage on scripts ## 0.18.0 ### Minor Changes - 4b02776: Fix handling of `slot` attribute used inside of expressions ### Patch Changes - 62d2a8e: Properly handle nested expressions that return multiple elements - 571d6b9: Ensure `html` and `body` elements are scoped ## 0.17.1 ### Patch Changes - 3885217: Support `<slot is:inline />` and preserve slot attribute when not inside component - ea94a26: Fix issue with fallback content inside of slots ## 0.17.0 ### Minor Changes - 3a9d166: Add renderHead injection points ## 0.16.1 ### Patch Changes - 9fcc43b: Build JS during the release ## 0.16.0 ### Minor Changes - 470efc0: Adds component metadata to the TransformResult ### Patch Changes - c104d4f: Fix #418: 
duplicate text when only text ## 0.15.2 ### Patch Changes - f951822: Fix wasm `parse` to save attribute namespace - 5221e09: Fix serialize spread attribute ## 0.15.1 ### Patch Changes - 26cbcdb: Prevent side-effectual CSS imports from becoming module metadata ## 0.15.0 ### Minor Changes - 702e848: Trailing space at the end of Astro files is now stripped from Component output. ### Patch Changes - 3a1a24b: Fix long-standing bug where a `class` attribute inside of a spread prop will cause duplicate `class` attributes - 62faceb: Fixes an issue where curly braces in `<math>` elements would get parsed as expressions instead of raw text. ## 0.14.3 ### Patch Changes - 6177620: Fix edge case with expressions inside of tables - 79b1ed6: Provides a better error message when we can't match client:only usage to an import statement - a4e1957: Fix Astro scoping when `class:list` is used - fda859a: Fix json escape ## 0.14.2 ### Patch Changes - 6f30e2e: Fix edge case with nested expression inside `<>` - 15e3ff8: Fix panic when using a `<slot />` in `head` - c048567: Fix edge case with `select` elements and expression children - 13d2fc2: Fix #340, fixing behavior of content after an expression inside of `<select>` - 9e37a72: Fix issue when multiple client-only components are used - 67993d5: Add support for block comment only expressions, block comment only shorthand attributes and block comments in shorthand attributes - 59fbea2: Fix #343, edge case with `<tr>` inside component - 049dadf: Fix usage of expressions inside `caption` and `colgroup` elements ## 0.14.1 ### Patch Changes - 1a82892: Fix bug with `<script src>` not being hoisted ## 0.14.0 ### Minor Changes - c0da4fe: Implements [RFC0016](https://github.com/withastro/rfcs/blob/main/proposals/0016-style-script-defaults.md), the new `script` and `style` behavior. 
## 0.13.2 ### Patch Changes - 014370d: Fix issue with named slots in <head> element - da831c1: Fix handling of RegExp literals in frontmatter ## 0.13.1 ### Patch Changes - 2f8334c: Update `parse` and `serialize` functions to combine `attributes` and `directives`, fix issue with `serialize` not respecting `attributes`. - b308955: Add self-close option to serialize util ## 0.13.0 ### Minor Changes - ce3f1a5: Update CSS parser to use `esbuild`, adding support for CSS nesting, `@container`, `@layer`, and other modern syntax features ### Patch Changes - 24a1185: Parser: Always output the `children` property in an element node, even if it has no children ## 0.12.1 ### Patch Changes - 097ac47: Parser: Always output the `attribute` property in an element node, even if empty - ad62437: Add `serialize` util - eb7eb95: Parse: fix escaping of `&` characters in AST output ## 0.12.0 ### Minor Changes - c6dd41d: Do not render implicit tags created during the parsing process - c6dd41d: Remove "as" option, treats all documents as fragments that generate no implicit tags - c6dd41d: Add `parse` function which generates an AST - c6dd41d: Adds support for `Astro.self` (as accepted in the [Recursive Components RFC](https://github.com/withastro/rfcs/blob/main/active-rfcs/0000-recursive-components.md)). ### Patch Changes - c6dd41d: Add `fragment` node types to AST definitions, expose Fragment helper to utils - c6dd41d: Adds metadata on client:only components - c6dd41d: Expose AST types via `@astrojs/compiler/types` - c6dd41d: Export `./types` rather than `./types.d.ts` - c6dd41d: Fix edge case with Fragment parsing in head, add `fragment` node to AST output - c6dd41d: Fix <slot> behavior inside of head - c6dd41d: Improve head injection behavior - ef0b4b3: Move `typescript` dependency to development dependencies, as it is not needed in the package runtime. 
- c6dd41d: Update exposed types - c6dd41d: Remove usage of `escapeHTML` util - c6dd41d: Export all types from shared types - c6dd41d: Fix `head` behavior and a bug related to ParseFragment - c6dd41d: Adds a warning when using an expression with a hoisted script ## 0.12.0-next.9 ### Patch Changes - 95ec808: Fix <slot> behavior inside of head - 95ec808: Remove usage of `escapeHTML` util ## 0.12.0-next.8 ### Patch Changes - 4497628: Improve head injection behavior ## 0.12.0-next.7 ### Patch Changes - e26b9d6: Fix edge case with Fragment parsing in head, add `fragment` node to AST output ## 0.12.0-next.6 ### Patch Changes - 37ef1c1: Fix `head` behavior and a bug related to ParseFragment ## 0.12.0-next.5 ### Patch Changes - 97cf66b: Adds metadata on client:only components ## 0.12.0-next.4 ### Patch Changes - e2061dd: Export all types from shared types ## 0.12.0-next.3 ### Patch Changes - ef69b74: Export `./types` rather than `./types.d.ts` ## 0.12.0-next.2 ### Patch Changes - 073b0f1: Adds a warning when using an expression with a hoisted script ## 0.12.0-next.1 ### Patch Changes - a539d53: Update exposed types ## 0.12.0-next.0 ### Minor Changes - 8ce39c7: Do not render implicit tags created during the parsing process - 41b825a: Remove "as" option, treats all documents as fragments that generate no implicit tags - 483b34b: Add `parse` function which generates an AST - 9e5e2f8: Adds support for `Astro.self` (as accepted in the [Recursive Components RFC](https://github.com/withastro/rfcs/blob/main/active-rfcs/0000-recursive-components.md)). ### Patch Changes - 16b167c: Expose AST types via `@astrojs/compiler/types` ## 0.11.4 ### Patch Changes - 99b5de2: Reset tokenizer state when a raw element that is self-closing is encountered. This fixes the handling of self-closing elements like `<title />` and `<script />` when used with `set:html`. 
## 0.11.3 ### Patch Changes - dcf15bf: Fixes bug causing a crash when using Astro.resolve on a hoisted script ## 0.11.2 ### Patch Changes - 41cc6ef: Fix memory issue caused by duplicate WASM instantiations ## 0.11.1 ### Patch Changes - 4039682: Fixes hoist script tracking when passed a variable ## 0.11.0 ### Minor Changes - f5d4006: Switch from TinyGo to Go's built-in WASM output. While this is an unfortunate size increase for our `.wasm` file, it should also be significantly more stable and cut down on hard-to-reproduce bugs. Please see https://github.com/withastro/compiler/pull/291 for more details. ## 0.11.0-next--wasm.0 ### Minor Changes - 9212ccc: Switch from TinyGo to Go's built-in WASM output. While this is an unfortunate size increase for our `WASM` file, it should also be significantly more stable and cut down on hard-to-reproduce bugs. Please see https://github.com/withastro/compiler/pull/291 for more details. ## 0.10.2 ### Patch Changes - 7f7c65c: Fix conditional rendering for special elements like `iframe` and `noscript` - 9d789c9: Fix handling of nested template literals inside of expressions - 5fa9e53: Fix handling of special characters inside of expressions - 8aaa956: Formalize support for magic `data-astro-raw` attribute with new, official `is:raw` directive - c698350: Improve MathML support. `{}` inside of `<math>` is now treated as raw text rather than an expression construct. ## 0.10.1 ### Patch Changes - 38ae39a: Add support for `set:html` and `set:text` directives, as designed in the [`set:html` RFC](https://github.com/withastro/rfcs/blob/main/active-rfcs/0000-set-html.md). ## 0.10.0 ### Minor Changes - 02d41a8: Adds support for `Astro.self` (as accepted in the [Recursive Components RFC](https://github.com/withastro/rfcs/blob/main/active-rfcs/0000-recursive-components.md)). 
### Patch Changes - 4fe522b: Fixes inclusion of define:vars scripts/styles using the StaticExtraction flag ## 0.9.2 ### Patch Changes - 92cc76b: Fix wasm build for use in Astro ## 0.9.1 ### Patch Changes - 85d35a5: Revert previous change that broke Windows ## 0.9.0 ### Minor Changes - c1a0172: changing raw_with_expression_loop in tokenizer to only handle string that has '`' differently otherwise it should treat it as normal string ### Patch Changes - 1fa2162: Improved types for TransformResult with hoisted scripts ## 0.8.2 ### Patch Changes - 502f8b8: Adds a new property, `scripts`, to `TransformResult` ## 0.8.1 ### Patch Changes - cd277e2: Fix bug with data-astro-raw detection ## 0.8.0 ### Minor Changes - 3690968: Passes the Pathname to createAstro instead of import.meta.url ## 0.7.4 ### Patch Changes - afc1e82: Remove console log (sorry!) ## 0.7.3 ### Patch Changes - cc24069: Fix some edge cases with expressions inside of `<table>` elements - 086275c: Fix edge case with textarea inside expression ## 0.7.2 ### Patch Changes - 899e48d: Fix issue with active formatting elements by marking expressions as unique scopes ## 0.7.1 ### Patch Changes - fa039dd: Fix tokenization of attribute expression containing the solidus (`/`) character - e365c3c: Fix bug with expressions inside of <table> elements (without reverting a previous fix to expressions inside of <a> elements) - 7c5889f: Fix bug with `@keyframes` scoping - df74ab3: Fix bug where named grid columns (like `[content-start]`) would be scoped, producing invalid CSS - abe37ca: Fix handling of components and expressions inside of `<noscript>` - 8961cf4: Fix a logical error with expression tokenization when using nested functions. Previously, only the first brace pair would be respected and following pairs would be treated as expression boundaries. 
## 0.7.0 ### Minor Changes - 43cbac3: Adds metadata on hydration directives used by the component ## 0.6.2 ### Patch Changes - e785310: Fix issue with import assertions creating additional imports ## 0.6.1 ### Patch Changes - e40ea9c: Include LICENSE information ## 0.6.0 ### Minor Changes - b9e2b4b: Adds option to make CSS be extracted statically ## 0.5.7 ### Patch Changes - 75bd730: Fix regression with Components mixed with active formatting elements ## 0.5.6 ### Patch Changes - 7ca419e: Improve behavior of empty expressions in body and attributes, where `{}` is equivalent to `{(void 0)}` ## 0.5.5 ### Patch Changes - 7a41d7b: Fix `<>` syntax edge case inside of expressions - b0d35b9: Fix edge case with conditional scripts ## 0.5.4 ### Patch Changes - f2e0322: Do not reconstruct active formatting elements on expression start - 0103285: Bugfix: expressions in table context ## 0.5.3 ### Patch Changes - 50cbc57: Fix fragment expression behavior edge case ## 0.5.2 ### Patch Changes - 8f0e3d7: Fix fragment parsing bugs when frontmatter is missing or top-level expressions are present ## 0.5.1 ### Patch Changes - 1f0ba41: Fix bug when fragment parsing frontmatter is missing ## 0.5.0 ### Minor Changes - 901faef: Passes projectRoot to createAstro ## 0.4.0 ### Minor Changes - 7e1aded: Change behavior of `as: "fragment"` option to support arbitrary `head` and `body` tags ## 0.3.9 ### Patch Changes - 2884a82: Bugfix: CSS comments insert semicolon ## 0.3.8 ### Patch Changes - 2c8f5d8: Fix another component-only edge case ## 0.3.7 ### Patch Changes - eb0d17f: Fix edge case with files that contain a single component ## 0.3.6 ### Patch Changes - af003e9: Fix syntax error in transformed output ## 0.3.5 ### Patch Changes - bca7e00: Fixed issue where an Astro Components could only add one style or script - 2a2f951: Fix regression where leading `<style>` elements could break generated tags - db162f8: Fix case-sensitivity of void elements - 44ee189: Fixed issue where expressions did 
not work within SVG elements - 9557113: Fix panic when preprocessed style is empty ## 0.3.4 ### Patch Changes - 351f298: Fix edge case with with `textarea` inside of a Component when the document generated an implicit `head` tag - 0bcfd4b: Fix CSS scoping of \* character inside of calc() expressions - 4be512f: Encode double quotes inside of quoted attributes - ad865e5: Fix behavior of expressions inside of <table> elements ## 0.3.3 ### Patch Changes - 6d2a3c2: Fix handling of top-level component nodes with leading styles - 2ce10c6: Fix "call to released function" issue ## 0.3.2 ### Patch Changes - 8800f80: Fix comments and strings inside of attribute expressions ## 0.3.1 ### Patch Changes - 432eaaf: Fix for compiler regression causing nil pointer ## 0.3.0 ### Minor Changes - 1255477: Drop support for elements inside of Frontmatter, which was undefined behavior that caused lots of TypeScript interop problems ### Patch Changes - 44dc0c6: Fixes issue with \x00 character on OSX - d74acfa: Fix regression with expressions inside of <select> elements - f50ae69: Bugfix: don’t treat import.meta as import statement ## 0.2.27 ### Patch Changes - 460c1e2: Use `$metadata.resolvePath` utility to support the `client:only` directive ## 0.2.26 ### Patch Changes - 3e5ef91: Implement getStaticPaths hoisting - 8a434f9: Fix namespace handling to support attributes like `xmlns:xlink` ## 0.2.25 ### Patch Changes - 59f36cb: Fix custom-element slot behavior to remain spec compliant - 79b2e6f: Fix style/script ordering - 6041ee5: Add support for `client:only` directive - 2cd35f6: Fix apostrophe handling inside of elements which are inside of expressions ([#1478](https://github.com/snowpackjs/astro/issues/1478)) ## 0.2.24 ### Patch Changes - bfd1b94: Fix issue with `style` and `script` processing where siblings would be skipped - 726d272: Fix <Fragment> and <> handling - f052465: Fix CSS variable parsing in the scoped CSS transform ## 0.2.23 ### Patch Changes - 632c29b: Fix nil pointer 
dereference when every element on page is a component - 105a159: Fix bug where text inside of elements inside of an expression was not read properly (https://github.com/snowpackjs/astro/issues/1617) ## 0.2.22 ### Patch Changes - 04c1b63: Fix bug with dynamic classes ## 0.2.21 ### Patch Changes - 7b46e9f: Revert automatic DOCTYPE injection to fix package ## 0.2.20 ### Patch Changes - 39298e4: Fix small bugs with script/style hoisting behavior - bd1014a: Bugfix: style tags in SVG ## 0.2.19 ### Patch Changes - 318dd69: Fix handling of self-closing "raw" tags like <script /> and <style /> - 9372c10: Support `define:vars` with root `html` element on pages - c4491cd: Fix bug with <script define:vars> when not using the `hoist` attribute ## 0.2.18 ### Patch Changes - 2f4b772: Prevents overrunning an array when checking for raw attribute ## 0.2.17 ### Patch Changes - 4f9155a: Bugfix: fix character limit of 4096 characters - 83df04c: Upgrade to Go 1.17 ## 0.2.16 ### Patch Changes - 9ad8da7: Allows a data-astro-raw attr to parse children as raw text - 61b77de: Bugfix: CSS and selector scoping ## 0.2.15 ### Patch Changes - 8fbae5e: Bugfix: fix component detection bug in parser - 37b5b6e: Bugfix: wait to release processStyle() until after fn call ## 0.2.14 ### Patch Changes - f59c886: Bugfix: allow for detection of void tags (e.g. 
<img>) - 4c8d14a: Fixes textContent containing a forward slash ## 0.2.13 ### Patch Changes - f262b61: Fix for string template usage within expressions ## 0.2.12 ### Patch Changes - c9fa9eb: Fix for apostrophe within elements ## 0.2.11 ### Patch Changes - 27629b2: Reverts the apostrophe change that broke markdown parsing ## 0.2.10 ### Patch Changes - 57eb728: Fixes hydrated scripts not recognized when using fragment transformation ## 0.2.9 ### Patch Changes - 3ea8d8c: Fix for string interpolation within titles - ef7cb1e: Fixes bug with textContent containing apostrophe character ## 0.2.8 ### Patch Changes - b2d5564: Fixes wasm build ## 0.2.6 ### Patch Changes - fix small issue with `preprocessStyle` handling of `null` or `undefined` ## 0.2.5 ### Patch Changes - Fix issue with CI deployment ## 0.2.4 ### Patch Changes - 4410c5a: Add support for a `preprocessStyle` function - 934e6a6: Chore: add linting, format code ## 0.1.15 ### Patch Changes - 5c02abf: Fix split so it always splits on first non-import/export - 93c1cd9: Bugfix: handle RegExp in Astro files - 94c59fa: Bugfix: tokenizer tries to parse JS comments - 46a5c75: Adds the top-level Astro object - 7ab9148: Improve JS scanning algorithm to be more fault tolerant, less error prone ## 0.1.12 ### Patch Changes - 96dc356: Adds hydrationMap support for custom elements ## 0.1.11 ### Patch Changes - 939283d: Adds the component export for use in hydration ## 0.1.10 ### Patch Changes - 3a336ef: Adds a hydration map to enable hydration within Astro components ## 0.1.9 ### Patch Changes - 7d887de: Allows the Astro runtime to create the Astro.slots object ## 0.1.8 ### Patch Changes - d159658: Publish via PR ## 0.1.7 ### Patch Changes - c52e69b: Include astro.wasm in the package ## 0.1.6 ### Patch Changes - bd05f7c: Actually include _any_ files? 
## 0.1.5 ### Patch Changes - c4ed69e: Includes the wasm binary in the npm package ## 0.1.4 ### Patch Changes - 2f1f1b8: Pass custom element tag names to renderComponent as strings ## 0.1.3 ### Patch Changes - e4e2de5: Update to [`tinygo@0.20.0`](https://github.com/tinygo-org/tinygo/releases/tag/v0.20.0) and remove `go@1.16.x` restriction. - ae71546: Add support for `fragment` compilation, to be used with components rather than pages - 8c2aaf9: Allow multiple top-level conditional expressions ## 0.1.0 ### Patch Changes - c9407cd: Fix for using conditionals at the top-level ================================================ FILE: packages/compiler/README.md ================================================ # Astro Compiler Astro’s [Go](https://golang.org/) + WASM compiler. ## Install ``` npm install @astrojs/compiler ``` ## Usage #### Transform `.astro` to valid TypeScript The Astro compiler can convert `.astro` syntax to a TypeScript Module whose default export generates HTML. **Some notes**... - TypeScript is valid `.astro` syntax! The output code may need an additional post-processing step to generate valid JavaScript. - `.astro` files rely on a server implementation exposed as `astro/runtime/server/index.js` in the Node ecosystem. Other runtimes currently need to bring their own rendering implementation and reference it via `internalURL`. This is a pain point we're looking into fixing. ```js import { transform, type TransformResult } from "@astrojs/compiler"; const result = await transform(source, { filename: "/Users/astro/Code/project/src/pages/index.astro", sourcemap: "both", internalURL: "astro/runtime/server/index.js", }); ``` #### Parse `.astro` and return an AST The Astro compiler can emit an AST using the `parse` method. **Some notes**... - Position data is currently incomplete and in some cases incorrect. We're working on it! - A `TextNode` can represent both HTML `text` and JavaScript/TypeScript source code. 
- The `@astrojs/compiler/utils` entrypoint exposes `walk` and `walkAsync` functions that can be used to traverse the AST. It also exposes the `is` helper which can be used as guards to derive the proper types for each `node`. ```js import { parse } from "@astrojs/compiler"; import { walk, walkAsync, is } from "@astrojs/compiler/utils"; const result = await parse(source, { position: false, // defaults to `true` }); walk(result.ast, (node) => { // `tag` nodes are `element` | `custom-element` | `component` if (is.tag(node)) { console.log(node.name); } }); await walkAsync(result.ast, async (node) => { if (is.tag(node)) { node.value = await expensiveCalculation(node) } }); ``` ## Develop ### VSCode / CodeSpaces A `devcontainer` configuration is available for use with VSCode's [Remote Development extension pack](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack) and GitHub CodeSpaces. ## Contributing [CONTRIBUTING.md](/CONTRIBUTING.md) ================================================ FILE: packages/compiler/package.json ================================================ { "name": "@astrojs/compiler", "author": "withastro", "license": "MIT", "type": "module", "bugs": "https://github.com/withastro/compiler/issues", "homepage": "https://astro.build", "version": "3.0.1", "scripts": { "build": "tsup" }, "main": "./dist/node/index.js", "types": "./dist/shared/types.d.ts", "repository": { "type": "git", "url": "https://github.com/withastro/compiler.git" }, "files": [ "dist", "types.d.ts", "utils.d.ts", "sync.d.ts" ], "exports": { ".": { "types": "./dist/node/index.d.ts", "browser": "./dist/browser/index.js", "import": "./dist/node/index.js", "require": "./dist/node/index.cjs", "default": "./dist/browser/index.js" }, "./sync": { "types": "./dist/node/sync.d.ts", "import": "./dist/node/sync.js", "require": "./dist/node/sync.cjs", "default": "./dist/node/sync.js" }, "./utils": { "types": "./dist/node/utils.d.ts", "browser": 
"./dist/browser/utils.js", "import": "./dist/node/utils.js", "require": "./dist/node/utils.cjs", "default": "./dist/browser/utils.js" }, "./astro.wasm": "./dist/astro.wasm", "./types": "./dist/shared/types.d.ts", "./package.json": "./package.json" }, "devDependencies": { "@jridgewell/trace-mapping": "^0.3.16", "@types/node": "^18.15.11", "@types/sass": "^1.43.1", "acorn": "^8.8.1", "esbuild": "^0.17.17", "tsup": "^6.7.0", "typescript": "~5.0.2" } } ================================================ FILE: packages/compiler/src/browser/index.ts ================================================ import type * as types from '../shared/types.js'; import Go from './wasm_exec.js'; export const transform: typeof types.transform = (input, options) => { return ensureServiceIsRunning().transform(input, options); }; export const parse: typeof types.parse = (input, options) => { return ensureServiceIsRunning().parse(input, options); }; export const convertToTSX: typeof types.convertToTSX = (input, options) => { return ensureServiceIsRunning().convertToTSX(input, options); }; interface Service { transform: typeof types.transform; parse: typeof types.parse; convertToTSX: typeof types.convertToTSX; } let initializePromise: Promise<Service> | undefined; let longLivedService: Service | undefined; export const teardown: typeof types.teardown = () => { initializePromise = undefined; longLivedService = undefined; (globalThis as any)['@astrojs/compiler'] = undefined; }; export const initialize: typeof types.initialize = async (options) => { let wasmURL = options.wasmURL; if (!wasmURL) throw new Error('Must provide the "wasmURL" option'); wasmURL += ''; if (!initializePromise) { initializePromise = startRunningService(wasmURL).catch((err) => { // Let the caller try again if this fails. initializePromise = void 0; // But still, throw the error back up the caller. 
throw err; }); } longLivedService = longLivedService || (await initializePromise); }; const ensureServiceIsRunning = (): Service => { if (!initializePromise) throw new Error('You need to call "initialize" before calling this'); if (!longLivedService) throw new Error( 'You need to wait for the promise returned from "initialize" to be resolved before calling this' ); return longLivedService; }; const instantiateWASM = async ( wasmURL: string, importObject: Record<string, any> ): Promise<WebAssembly.WebAssemblyInstantiatedSource> => { let response = undefined; if (WebAssembly.instantiateStreaming) { response = await WebAssembly.instantiateStreaming(fetch(wasmURL), importObject); } else { const fetchAndInstantiateTask = async () => { const wasmArrayBuffer = await fetch(wasmURL).then((res) => res.arrayBuffer()); return WebAssembly.instantiate(wasmArrayBuffer, importObject); }; response = await fetchAndInstantiateTask(); } return response; }; const startRunningService = async (wasmURL: string): Promise<Service> => { const go = new Go(); const wasm = await instantiateWASM(wasmURL, go.importObject); go.run(wasm.instance); const service: any = (globalThis as any)['@astrojs/compiler']; return { transform: (input, options) => new Promise((resolve) => resolve(service.transform(input, options || {}))), convertToTSX: (input, options) => new Promise((resolve) => resolve(service.convertToTSX(input, options || {}))).then( (result: any) => ({ ...result, map: JSON.parse(result.map), }) ), parse: (input, options) => new Promise((resolve) => resolve(service.parse(input, options || {}))).then( (result: any) => ({ ...result, ast: JSON.parse(result.ast) }) ), }; }; ================================================ FILE: packages/compiler/src/browser/utils.ts ================================================ import type { CommentNode, ComponentNode, CustomElementNode, DoctypeNode, ElementNode, ExpressionNode, FragmentNode, FrontmatterNode, LiteralNode, Node, ParentNode, RootNode, 
TagLikeNode, TextNode, } from '../shared/ast.js';

/** Callback invoked for every node reached during an AST walk. */
export type Visitor = (node: Node, parent?: ParentNode, index?: number) => void | Promise<void>;

// Builds a type guard that matches nodes by their `type` discriminant.
function guard<Type extends Node>(type: string) {
  return (node: Node): node is Type => node.type === type;
}

/** Type-guard helpers for narrowing an AST `Node` to its concrete variant. */
export const is = {
  parent(node: Node): node is ParentNode {
    return Array.isArray((node as any).children);
  },
  literal(node: Node): node is LiteralNode {
    return typeof (node as any).value === 'string';
  },
  tag(node: Node): node is TagLikeNode {
    return (
      node.type === 'element' ||
      node.type === 'custom-element' ||
      node.type === 'component' ||
      node.type === 'fragment'
    );
  },
  whitespace(node: Node): node is TextNode {
    return node.type === 'text' && node.value.trim().length === 0;
  },
  root: guard<RootNode>('root'),
  element: guard<ElementNode>('element'),
  customElement: guard<CustomElementNode>('custom-element'),
  component: guard<ComponentNode>('component'),
  fragment: guard<FragmentNode>('fragment'),
  expression: guard<ExpressionNode>('expression'),
  text: guard<TextNode>('text'),
  doctype: guard<DoctypeNode>('doctype'),
  comment: guard<CommentNode>('comment'),
  frontmatter: guard<FrontmatterNode>('frontmatter'),
};

/** Depth-first AST traversal driver shared by `walk` and `walkAsync`. */
class Walker {
  constructor(private callback: Visitor) {}
  async visit(node: Node, parent?: ParentNode, index?: number): Promise<void> {
    await this.callback(node, parent, index);
    if (is.parent(node)) {
      const promises = [];
      for (let i = 0; i < node.children.length; i++) {
        const child = node.children[i];
        // BUGFIX: recurse via `visit` (not `callback`) so grandchildren and all
        // deeper descendants are traversed; calling `callback` directly stopped
        // the walk one level below the entry node.
        promises.push(this.visit(child, node as ParentNode, i));
      }
      await Promise.all(promises);
    }
  }
}

/**
 * Traverses the AST, invoking `callback` for every node.
 * Fire-and-forget: the visitor should be synchronous; use `walkAsync` otherwise.
 */
export function walk(node: ParentNode, callback: Visitor): void {
  const walker = new Walker(callback);
  walker.visit(node);
}

/** Traverses the AST and resolves once every (possibly async) visitor call settles. */
export function walkAsync(node: ParentNode, callback: Visitor): Promise<void> {
  const walker = new Walker(callback);
  return walker.visit(node);
}

// Renders a tag's attribute list back to `.astro` source text (each attribute
// preceded by a single space); the switch continues on the following dump line.
function serializeAttributes(node: TagLikeNode): string {
  let output = '';
  for (const attr of node.attributes) {
    output += ' ';
    switch (attr.kind) {
      case
'empty': { output += `${attr.name}`; break; } case 'expression': { output += `${attr.name}={${attr.value}}`; break; } case 'quoted': { output += `${attr.name}=${attr.raw}`; break; } case 'template-literal': { output += `${attr.name}=\`${attr.value}\``; break; } case 'shorthand': { output += `{${attr.name}}`; break; } case 'spread': { output += `{...${attr.value}}`; break; } } } return output; } export interface SerializeOptions { selfClose: boolean; } /** @deprecated Please use `SerializeOptions` */ export type SerializeOtions = SerializeOptions; export function serialize(root: Node, opts: SerializeOptions = { selfClose: true }): string { let output = ''; function visitor(node: Node) { if (is.root(node)) { for (const child of node.children) { visitor(child); } } else if (is.frontmatter(node)) { output += `---${node.value}---\n\n`; } else if (is.comment(node)) { output += `<!--${node.value}-->`; } else if (is.expression(node)) { output += '{'; for (const child of node.children) { visitor(child); } output += '}'; } else if (is.literal(node)) { output += node.value; } else if (is.tag(node)) { output += `<${node.name}`; output += serializeAttributes(node); if (node.children.length === 0 && opts.selfClose) { output += ' />'; } else { output += '>'; for (const child of node.children) { visitor(child); } output += `</${node.name}>`; } } } visitor(root); return output; } ================================================ FILE: packages/compiler/src/browser/wasm_exec.ts ================================================ // @ts-nocheck // Copyright 2018 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // // This file has been modified for use by the TinyGo compiler. // This file has been further modified for use by Astro. 
const enosys = () => { const err = new Error('not implemented'); err.code = 'ENOSYS'; return err; }; let outputBuf = ''; const fs = { constants: { O_WRONLY: -1, O_RDWR: -1, O_CREAT: -1, O_TRUNC: -1, O_APPEND: -1, O_EXCL: -1, }, // unused writeSync(fd, buf) { outputBuf += decoder.decode(buf); const nl = outputBuf.lastIndexOf('\n'); if (nl != -1) { console.log(outputBuf.substr(0, nl)); outputBuf = outputBuf.substr(nl + 1); } return buf.length; }, write(fd, buf, offset, length, position, callback) { if (offset !== 0 || length !== buf.length || position !== null) { callback(enosys()); return; } const n = this.writeSync(fd, buf); callback(null, n); }, chmod(path, mode, callback) { callback(enosys()); }, chown(path, uid, gid, callback) { callback(enosys()); }, close(fd, callback) { callback(enosys()); }, fchmod(fd, mode, callback) { callback(enosys()); }, fchown(fd, uid, gid, callback) { callback(enosys()); }, fstat(fd, callback) { callback(enosys()); }, fsync(fd, callback) { callback(null); }, ftruncate(fd, length, callback) { callback(enosys()); }, lchown(path, uid, gid, callback) { callback(enosys()); }, link(path, link, callback) { callback(enosys()); }, lstat(path, callback) { callback(enosys()); }, mkdir(path, perm, callback) { callback(enosys()); }, open(path, flags, mode, callback) { callback(enosys()); }, read(fd, buffer, offset, length, position, callback) { callback(enosys()); }, readdir(path, callback) { callback(enosys()); }, readlink(path, callback) { callback(enosys()); }, rename(from, to, callback) { callback(enosys()); }, rmdir(path, callback) { callback(enosys()); }, stat(path, callback) { callback(enosys()); }, symlink(path, link, callback) { callback(enosys()); }, truncate(path, length, callback) { callback(enosys()); }, unlink(path, callback) { callback(enosys()); }, utimes(path, atime, mtime, callback) { callback(enosys()); }, }; const process = { getuid() { return -1; }, getgid() { return -1; }, geteuid() { return -1; }, getegid() { return -1; }, 
getgroups() { throw enosys(); }, pid: -1, ppid: -1, umask() { throw enosys(); }, cwd() { throw enosys(); }, chdir() { throw enosys(); }, }; Object.defineProperties(globalThis, { fs: { value: fs, enumerable: true, }, process: { value: process, enumerable: true, }, }); const encoder = new TextEncoder('utf-8'); const decoder = new TextDecoder('utf-8'); const logLine = []; export default class Go { public importObject; constructor() { this.argv = ['js']; this.env = {}; this.exit = (code) => { if (code !== 0) { console.warn('exit code:', code); } }; this._exitPromise = new Promise((resolve) => { this._resolveExitPromise = resolve; }); this._pendingEvent = null; this._scheduledTimeouts = new Map(); this._nextCallbackTimeoutID = 1; const setInt64 = (addr, v) => { this.mem.setUint32(addr + 0, v, true); this.mem.setUint32(addr + 4, Math.floor(v / 4294967296), true); }; const getInt64 = (addr) => { const low = this.mem.getUint32(addr + 0, true); const high = this.mem.getInt32(addr + 4, true); return low + high * 4294967296; }; const loadValue = (addr) => { const f = this.mem.getFloat64(addr, true); if (f === 0) { return undefined; } if (!isNaN(f)) { return f; } const id = this.mem.getUint32(addr, true); return this._values[id]; }; const storeValue = (addr, v) => { const nanHead = 0x7ff80000; if (typeof v === 'number' && v !== 0) { if (isNaN(v)) { this.mem.setUint32(addr + 4, nanHead, true); this.mem.setUint32(addr, 0, true); return; } this.mem.setFloat64(addr, v, true); return; } if (v === undefined) { this.mem.setFloat64(addr, 0, true); return; } let id = this._ids.get(v); if (id === undefined) { id = this._idPool.pop(); if (id === undefined) { id = this._values.length; } this._values[id] = v; this._goRefCounts[id] = 0; this._ids.set(v, id); } this._goRefCounts[id]++; let typeFlag = 0; switch (typeof v) { case 'object': if (v !== null) { typeFlag = 1; } break; case 'string': typeFlag = 2; break; case 'symbol': typeFlag = 3; break; case 'function': typeFlag = 4; break; } 
this.mem.setUint32(addr + 4, nanHead | typeFlag, true); this.mem.setUint32(addr, id, true); }; const loadSlice = (addr) => { const array = getInt64(addr + 0); const len = getInt64(addr + 8); return new Uint8Array(this._inst.exports.mem.buffer, array, len); }; const loadSliceOfValues = (addr) => { const array = getInt64(addr + 0); const len = getInt64(addr + 8); const a = new Array(len); for (let i = 0; i < len; i++) { a[i] = loadValue(array + i * 8); } return a; }; const loadString = (addr) => { const saddr = getInt64(addr + 0); const len = getInt64(addr + 8); return decoder.decode(new DataView(this._inst.exports.mem.buffer, saddr, len)); }; const timeOrigin = Date.now() - performance.now(); this.importObject = { gojs: { // Go's SP does not change as long as no Go code is running. Some operations (e.g. calls, getters and setters) // may synchronously trigger a Go event handler. This makes Go code get executed in the middle of the imported // function. A goroutine can switch to a new stack if the current stack is too small (see morestack function). // This changes the SP, thus we have to update the SP used by the imported function. 
// func wasmExit(code int32) 'runtime.wasmExit': (sp) => { sp >>>= 0; const code = this.mem.getInt32(sp + 8, true); this.exited = true; delete this._inst; delete this._values; delete this._goRefCounts; delete this._ids; delete this._idPool; this.exit(code); }, // func wasmWrite(fd uintptr, p unsafe.Pointer, n int32) 'runtime.wasmWrite': (sp) => { sp >>>= 0; const fd = getInt64(sp + 8); const p = getInt64(sp + 16); const n = this.mem.getInt32(sp + 24, true); fs.writeSync(fd, new Uint8Array(this._inst.exports.mem.buffer, p, n)); }, // func resetMemoryDataView() 'runtime.resetMemoryDataView': (sp) => { sp >>>= 0; this.mem = new DataView(this._inst.exports.mem.buffer); }, // func nanotime1() int64 'runtime.nanotime1': (sp) => { sp >>>= 0; setInt64(sp + 8, (timeOrigin + performance.now()) * 1000000); }, // func walltime() (sec int64, nsec int32) 'runtime.walltime': (sp) => { sp >>>= 0; const msec = new Date().getTime(); setInt64(sp + 8, msec / 1000); this.mem.setInt32(sp + 16, (msec % 1000) * 1000000, true); }, // func scheduleTimeoutEvent(delay int64) int32 'runtime.scheduleTimeoutEvent': (sp) => { sp >>>= 0; const id = this._nextCallbackTimeoutID; this._nextCallbackTimeoutID++; this._scheduledTimeouts.set( id, setTimeout( () => { this._resume(); while (this._scheduledTimeouts.has(id)) { // for some reason Go failed to register the timeout event, log and try again // (temporary workaround for https://github.com/golang/go/issues/28975) console.warn('scheduleTimeoutEvent: missed timeout event'); this._resume(); } }, getInt64(sp + 8) + 1, // setTimeout has been seen to fire up to 1 millisecond early ), ); this.mem.setInt32(sp + 16, id, true); }, // func clearTimeoutEvent(id int32) 'runtime.clearTimeoutEvent': (sp) => { sp >>>= 0; const id = this.mem.getInt32(sp + 8, true); clearTimeout(this._scheduledTimeouts.get(id)); this._scheduledTimeouts.delete(id); }, // func getRandomData(r []byte) 'runtime.getRandomData': (sp) => { sp >>>= 0; 
globalThis.crypto.getRandomValues(loadSlice(sp + 8)); }, // func finalizeRef(v ref) 'syscall/js.finalizeRef': (sp) => { sp >>>= 0; const id = this.mem.getUint32(sp + 8, true); this._goRefCounts[id]--; if (this._goRefCounts[id] === 0) { const v = this._values[id]; this._values[id] = null; this._ids.delete(v); this._idPool.push(id); } }, // func stringVal(value string) ref 'syscall/js.stringVal': (sp) => { sp >>>= 0; storeValue(sp + 24, loadString(sp + 8)); }, // func valueGet(v ref, p string) ref 'syscall/js.valueGet': (sp) => { sp >>>= 0; const result = Reflect.get(loadValue(sp + 8), loadString(sp + 16)); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 32, result); }, // func valueSet(v ref, p string, x ref) 'syscall/js.valueSet': (sp) => { sp >>>= 0; Reflect.set(loadValue(sp + 8), loadString(sp + 16), loadValue(sp + 32)); }, // func valueDelete(v ref, p string) 'syscall/js.valueDelete': (sp) => { sp >>>= 0; Reflect.deleteProperty(loadValue(sp + 8), loadString(sp + 16)); }, // func valueIndex(v ref, i int) ref 'syscall/js.valueIndex': (sp) => { sp >>>= 0; storeValue(sp + 24, Reflect.get(loadValue(sp + 8), getInt64(sp + 16))); }, // valueSetIndex(v ref, i int, x ref) 'syscall/js.valueSetIndex': (sp) => { sp >>>= 0; Reflect.set(loadValue(sp + 8), getInt64(sp + 16), loadValue(sp + 24)); }, // func valueCall(v ref, m string, args []ref) (ref, bool) 'syscall/js.valueCall': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); const m = Reflect.get(v, loadString(sp + 16)); const args = loadSliceOfValues(sp + 32); const result = Reflect.apply(m, v, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 56, result); this.mem.setUint8(sp + 64, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 56, err); this.mem.setUint8(sp + 64, 0); } }, // func valueInvoke(v ref, args []ref) (ref, bool) 'syscall/js.valueInvoke': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); 
const args = loadSliceOfValues(sp + 16); const result = Reflect.apply(v, undefined, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, result); this.mem.setUint8(sp + 48, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, err); this.mem.setUint8(sp + 48, 0); } }, // func valueNew(v ref, args []ref) (ref, bool) 'syscall/js.valueNew': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); const args = loadSliceOfValues(sp + 16); const result = Reflect.construct(v, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, result); this.mem.setUint8(sp + 48, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, err); this.mem.setUint8(sp + 48, 0); } }, // func valueLength(v ref) int 'syscall/js.valueLength': (sp) => { sp >>>= 0; setInt64(sp + 16, Number.parseInt(loadValue(sp + 8).length)); }, // valuePrepareString(v ref) (ref, int) 'syscall/js.valuePrepareString': (sp) => { sp >>>= 0; const str = encoder.encode(String(loadValue(sp + 8))); storeValue(sp + 16, str); setInt64(sp + 24, str.length); }, // valueLoadString(v ref, b []byte) 'syscall/js.valueLoadString': (sp) => { sp >>>= 0; const str = loadValue(sp + 8); loadSlice(sp + 16).set(str); }, // func valueInstanceOf(v ref, t ref) bool 'syscall/js.valueInstanceOf': (sp) => { sp >>>= 0; this.mem.setUint8(sp + 24, loadValue(sp + 8) instanceof loadValue(sp + 16) ? 
1 : 0); }, // func copyBytesToGo(dst []byte, src ref) (int, bool) 'syscall/js.copyBytesToGo': (sp) => { sp >>>= 0; const dst = loadSlice(sp + 8); const src = loadValue(sp + 32); if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) { this.mem.setUint8(sp + 48, 0); return; } const toCopy = src.subarray(0, dst.length); dst.set(toCopy); setInt64(sp + 40, toCopy.length); this.mem.setUint8(sp + 48, 1); }, // func copyBytesToJS(dst ref, src []byte) (int, bool) 'syscall/js.copyBytesToJS': (sp) => { sp >>>= 0; const dst = loadValue(sp + 8); const src = loadSlice(sp + 16); if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) { this.mem.setUint8(sp + 48, 0); return; } const toCopy = src.subarray(0, dst.length); dst.set(toCopy); setInt64(sp + 40, toCopy.length); this.mem.setUint8(sp + 48, 1); }, debug: (value) => { console.log(value); }, }, }; } async run(instance) { if (!(instance instanceof WebAssembly.Instance)) { throw new Error('Go.run: WebAssembly.Instance expected'); } this._inst = instance; this.mem = new DataView(this._inst.exports.mem.buffer); this._values = [ // JS values that Go currently has references to, indexed by reference id Number.NaN, 0, null, true, false, globalThis, this, ]; this._goRefCounts = new Array(this._values.length).fill(Number.POSITIVE_INFINITY); // number of references that Go has to a JS value, indexed by reference id this._ids = new Map([ // mapping from JS values to reference ids [0, 1], [null, 2], [true, 3], [false, 4], [globalThis, 5], [this, 6], ]); this._idPool = []; // unused ids that have been garbage collected this.exited = false; // whether the Go program has exited // Pass command line arguments and environment variables to WebAssembly by writing them to the linear memory. 
let offset = 4096; const strPtr = (str) => { const ptr = offset; const bytes = encoder.encode(`${str}\0`); new Uint8Array(this.mem.buffer, offset, bytes.length).set(bytes); offset += bytes.length; if (offset % 8 !== 0) { offset += 8 - (offset % 8); } return ptr; }; const argc = this.argv.length; const argvPtrs = []; this.argv.forEach((arg) => { argvPtrs.push(strPtr(arg)); }); argvPtrs.push(0); const keys = Object.keys(this.env).sort(); keys.forEach((key) => { argvPtrs.push(strPtr(`${key}=${this.env[key]}`)); }); argvPtrs.push(0); const argv = offset; argvPtrs.forEach((ptr) => { this.mem.setUint32(offset, ptr, true); this.mem.setUint32(offset + 4, 0, true); offset += 8; }); this._inst.exports.run(argc, argv); if (this.exited) { this._resolveExitPromise(); } await this._exitPromise; } private _resume() { if (this.exited) { throw new Error('Go program has already exited'); } this._inst.exports.resume(); if (this.exited) { this._resolveExitPromise(); } } private _makeFuncWrapper(id) { const go = this; return function () { const event = { id: id, this: this, args: arguments }; go._pendingEvent = event; go._resume(); return event.result; }; } } ================================================ FILE: packages/compiler/src/node/index.ts ================================================ export type { HoistedScript, ParseOptions, ParseResult, PreprocessorResult, TransformOptions, TransformResult, } from '../shared/types.js'; import { promises as fs } from 'node:fs'; import { fileURLToPath } from 'node:url'; import type * as types from '../shared/types.js'; import Go from './wasm_exec.js'; export const transform: typeof types.transform = async (input, options) => { return getService().then((service) => service.transform(input, options)); }; export const parse: typeof types.parse = async (input, options) => { return getService().then((service) => service.parse(input, options)); }; export const convertToTSX: typeof types.convertToTSX = async (input, options) => { return 
getService().then((service) => service.convertToTSX(input, options)); }; export const compile = async (template: string): Promise<string> => { const { default: mod } = await import( `data:text/javascript;charset=utf-8;base64,${Buffer.from(template).toString('base64')}` ); return mod; }; interface Service { transform: typeof types.transform; parse: typeof types.parse; convertToTSX: typeof types.convertToTSX; } let longLivedService: Promise<Service> | undefined; export const teardown: typeof types.teardown = () => { longLivedService = undefined; (globalThis as any)['@astrojs/compiler'] = undefined; }; const getService = (): Promise<Service> => { if (!longLivedService) { longLivedService = startRunningService().catch((err) => { // Let the caller try again if this fails. longLivedService = void 0; // But still, throw the error back up the caller. throw err; }); } return longLivedService; }; const instantiateWASM = async ( wasmURL: string, importObject: Record<string, any> ): Promise<WebAssembly.WebAssemblyInstantiatedSource> => { let response = undefined; const fetchAndInstantiateTask = async () => { const wasmArrayBuffer = await fs.readFile(wasmURL).then((res) => res.buffer); return WebAssembly.instantiate(new Uint8Array(wasmArrayBuffer), importObject); }; response = await fetchAndInstantiateTask(); return response; }; const startRunningService = async (): Promise<Service> => { const go = new Go(); const wasm = await instantiateWASM( fileURLToPath(new URL('../astro.wasm', import.meta.url)), go.importObject ); go.run(wasm.instance); const _service: any = (globalThis as any)['@astrojs/compiler']; return { transform: (input, options) => new Promise((resolve) => { try { resolve(_service.transform(input, options || {})); } catch (err) { // Recreate the service next time on panic longLivedService = void 0; throw err; } }), parse: (input, options) => new Promise((resolve) => resolve(_service.parse(input, options || {}))) .catch((error) => { longLivedService = void 0; throw 
error; }) .then((result: any) => ({ ...result, ast: JSON.parse(result.ast) })), convertToTSX: (input, options) => { return new Promise((resolve) => resolve(_service.convertToTSX(input, options || {}))) .catch((error) => { longLivedService = void 0; throw error; }) .then((result: any) => { return { ...result, map: JSON.parse(result.map) }; }); }, }; }; ================================================ FILE: packages/compiler/src/node/sync.ts ================================================ import { readFileSync } from 'node:fs'; import { fileURLToPath } from 'node:url'; import type * as types from '../shared/types.js'; import Go from './wasm_exec.js'; type UnwrappedPromise<T> = T extends (...params: any) => Promise<infer Return> ? (...params: Parameters<T>) => Return : T; interface Service { transform: UnwrappedPromise<typeof types.transform>; parse: UnwrappedPromise<typeof types.parse>; convertToTSX: UnwrappedPromise<typeof types.convertToTSX>; } function getService(): Service { if (!longLivedService) { longLivedService = startRunningService(); } return longLivedService; } let longLivedService: Service | undefined; export const transform = ((input, options) => getService().transform(input, options)) satisfies Service['transform']; export const parse = ((input, options) => { return getService().parse(input, options); }) satisfies Service['parse']; export const convertToTSX = ((input, options) => { return getService().convertToTSX(input, options); }) satisfies Service['convertToTSX']; export function startRunningService(): Service { const go = new Go(); const wasm = instantiateWASM( fileURLToPath(new URL('../astro.wasm', import.meta.url)), go.importObject ); go.run(wasm); const _service: any = (globalThis as any)['@astrojs/compiler']; return { transform: (input, options) => { try { return _service.transform(input, options || {}); } catch (err) { // Recreate the service next time on panic longLivedService = void 0; throw err; } }, parse: (input, options) => { try { 
const result = _service.parse(input, options || {});
				// The Go service returns the AST as a JSON string; decode it here.
				return { ...result, ast: JSON.parse(result.ast) };
			} catch (err) {
				// Recreate the service next time on panic
				longLivedService = void 0;
				throw err;
			}
		},
		convertToTSX: (input, options) => {
			try {
				const result = _service.convertToTSX(input, options || {});
				// The source map arrives JSON-encoded; decode before returning.
				return { ...result, map: JSON.parse(result.map) };
			} catch (err) {
				// Recreate the service next time on panic
				longLivedService = void 0;
				throw err;
			}
		},
	};
}

// Synchronously reads, compiles and instantiates the WASM module from disk
// (the blocking counterpart of the async loader in node/index.ts).
function instantiateWASM(wasmURL: string, importObject: Record<string, any>): WebAssembly.Instance {
	const wasmArrayBuffer = readFileSync(wasmURL);
	return new WebAssembly.Instance(new WebAssembly.Module(wasmArrayBuffer), importObject);
}



================================================
FILE: packages/compiler/src/node/utils.ts
================================================
import type {
	CommentNode,
	ComponentNode,
	CustomElementNode,
	DoctypeNode,
	ElementNode,
	ExpressionNode,
	FragmentNode,
	FrontmatterNode,
	LiteralNode,
	Node,
	ParentNode,
	RootNode,
	TagLikeNode,
	TextNode,
} from '../shared/ast.js';

// Callback invoked for every visited AST node; may be async. Receives the
// node, its parent (if any), and its index within the parent's children.
export type Visitor = (node: Node, parent?: ParentNode, index?: number) => void | Promise<void>;

// Builds a type guard that matches nodes by their `type` discriminant.
function guard<Type extends Node>(type: string) {
	return (node: Node): node is Type => node.type === type;
}

// Collection of type guards for narrowing AST nodes.
export const is = {
	// A node with a `children` array (root, element, component, fragment, …).
	parent(node: Node): node is ParentNode {
		return Array.isArray((node as any).children);
	},
	// A node carrying a plain string `value` (text, doctype, comment, frontmatter).
	literal(node: Node): node is LiteralNode {
		return typeof (node as any).value === 'string';
	},
	tag(node: Node): node is TagLikeNode {
		return (
			node.type === 'element' ||
			node.type === 'custom-element' ||
			node.type === 'component' ||
			node.type === 'fragment'
		);
	},
	// A text node consisting solely of whitespace.
	whitespace(node: Node): node is TextNode {
		return node.type === 'text' && node.value.trim().length === 0;
	},
	root: guard<RootNode>('root'),
	element: guard<ElementNode>('element'),
	customElement: guard<CustomElementNode>('custom-element'),
	component: guard<ComponentNode>('component'),
	fragment: guard<FragmentNode>('fragment'),
	expression: guard<ExpressionNode>('expression'),
	text: guard<TextNode>('text'),
	doctype:
guard<DoctypeNode>('doctype'),
	comment: guard<CommentNode>('comment'),
	frontmatter: guard<FrontmatterNode>('frontmatter'),
};

// Depth-first AST traversal helper shared by `walk` and `walkAsync`.
class Walker {
	constructor(private callback: Visitor) {}
	/**
	 * Invokes the callback for `node`, then recurses into each child so the
	 * entire subtree is visited. Children of the same parent are visited
	 * concurrently via Promise.all.
	 */
	async visit(node: Node, parent?: ParentNode, index?: number): Promise<void> {
		await this.callback(node, parent, index);
		if (is.parent(node)) {
			const promises = [];
			for (let i = 0; i < node.children.length; i++) {
				const child = node.children[i];
				// BUGFIX: recurse with `visit` (not `callback`) so grandchildren
				// and deeper descendants are walked; calling the callback
				// directly stopped the traversal at depth 2.
				promises.push(this.visit(child, node as ParentNode, i));
			}
			await Promise.all(promises);
		}
	}
}

/**
 * Walks the AST, invoking `callback` for every node.
 * Fire-and-forget: the returned promise is intentionally dropped; use
 * `walkAsync` if the callback is async and you need completion.
 */
export function walk(node: ParentNode, callback: Visitor): void {
	const walker = new Walker(callback);
	walker.visit(node);
}

/** Like `walk`, but returns a promise resolving when traversal completes. */
export function walkAsync(node: ParentNode, callback: Visitor): Promise<void> {
	const walker = new Walker(callback);
	return walker.visit(node);
}

// Serializes a tag's attributes back to their authored Astro syntax,
// dispatching on the attribute kind (quoted, expression, spread, …).
function serializeAttributes(node: TagLikeNode): string {
	let output = '';
	for (const attr of node.attributes) {
		output += ' ';
		switch (attr.kind) {
			case 'empty': {
				output += `${attr.name}`;
				break;
			}
			case 'expression': {
				output += `${attr.name}={${attr.value}}`;
				break;
			}
			case 'quoted': {
				output += `${attr.name}=${attr.raw}`;
				break;
			}
			case 'template-literal': {
				output += `${attr.name}=\`${attr.value}\``;
				break;
			}
			case 'shorthand': {
				output += `{${attr.name}}`;
				break;
			}
			case 'spread': {
				output += `{...${attr.name}}`;
				break;
			}
		}
	}
	return output;
}

export interface SerializeOptions {
	selfClose: boolean;
}

/** @deprecated Please use `SerializeOptions` */
export type SerializeOtions = SerializeOptions;

/** Serializes an AST back to Astro source text. */
export function serialize(root: Node, opts: SerializeOptions = { selfClose: true }): string {
	let output = '';
	function visitor(node: Node) {
		if (is.root(node)) {
			for (const child of node.children) {
				visitor(child);
			}
		} else if (is.frontmatter(node)) {
			output += `---${node.value}---\n\n`;
		} else if (is.comment(node)) {
			output += `<!--${node.value}-->`;
		} else if (is.expression(node)) {
			output += '{';
			for (const child of node.children) {
				visitor(child);
			}
			output += '}';
		} else if
(is.literal(node)) { output += node.value; } else if (is.tag(node)) { output += `<${node.name}`; output += serializeAttributes(node); if (node.children.length === 0 && opts.selfClose) { output += ' />'; } else { output += '>'; for (const child of node.children) { visitor(child); } output += `</${node.name}>`; } } } visitor(root); return output; } ================================================ FILE: packages/compiler/src/node/wasm_exec.ts ================================================ // @ts-nocheck // Copyright 2018 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // // This file has been modified for use by Astro. import crypto from 'node:crypto'; import fs from 'node:fs'; import { TextDecoder, TextEncoder } from 'node:util'; if (!globalThis.fs) { Object.defineProperty(globalThis, 'fs', { value: fs, }); } if (!globalThis.process) { Object.defineProperties(globalThis, 'process', { value: process, }); } if (!globalThis.crypto) { Object.defineProperty(globalThis, 'crypto', { value: crypto.webcrypto ? crypto.webcrypto : { getRandomValues(b) { return crypto.randomFillSync(b); }, }, }); } if (!globalThis.performance) { Object.defineProperty(globalThis, 'performance', { value: { now() { const [sec, nsec] = process.hrtime(); return sec * 1000 + nsec / 1000000; }, }, }); } // End of polyfills for common API. 
const encoder = new TextEncoder('utf-8'); const decoder = new TextDecoder('utf-8'); var logLine = []; export default class Go { public importObject; constructor() { this.argv = ['js']; this.env = {}; this.exit = (code) => { if (code !== 0) { console.warn('exit code:', code); } }; this._exitPromise = new Promise((resolve) => { this._resolveExitPromise = resolve; }); this._pendingEvent = null; this._scheduledTimeouts = new Map(); this._nextCallbackTimeoutID = 1; const setInt64 = (addr, v) => { this.mem.setUint32(addr + 0, v, true); this.mem.setUint32(addr + 4, Math.floor(v / 4294967296), true); }; const getInt64 = (addr) => { const low = this.mem.getUint32(addr + 0, true); const high = this.mem.getInt32(addr + 4, true); return low + high * 4294967296; }; const loadValue = (addr) => { const f = this.mem.getFloat64(addr, true); if (f === 0) { return undefined; } if (!isNaN(f)) { return f; } const id = this.mem.getUint32(addr, true); return this._values[id]; }; const storeValue = (addr, v) => { const nanHead = 0x7ff80000; if (typeof v === 'number' && v !== 0) { if (isNaN(v)) { this.mem.setUint32(addr + 4, nanHead, true); this.mem.setUint32(addr, 0, true); return; } this.mem.setFloat64(addr, v, true); return; } if (v === undefined) { this.mem.setFloat64(addr, 0, true); return; } let id = this._ids.get(v); if (id === undefined) { id = this._idPool.pop(); if (id === undefined) { id = this._values.length; } this._values[id] = v; this._goRefCounts[id] = 0; this._ids.set(v, id); } this._goRefCounts[id]++; let typeFlag = 0; switch (typeof v) { case 'object': if (v !== null) { typeFlag = 1; } break; case 'string': typeFlag = 2; break; case 'symbol': typeFlag = 3; break; case 'function': typeFlag = 4; break; } this.mem.setUint32(addr + 4, nanHead | typeFlag, true); this.mem.setUint32(addr, id, true); }; const loadSlice = (addr) => { const array = getInt64(addr + 0); const len = getInt64(addr + 8); return new Uint8Array(this._inst.exports.mem.buffer, array, len); }; const 
loadSliceOfValues = (addr) => { const array = getInt64(addr + 0); const len = getInt64(addr + 8); const a = new Array(len); for (let i = 0; i < len; i++) { a[i] = loadValue(array + i * 8); } return a; }; const loadString = (addr) => { const saddr = getInt64(addr + 0); const len = getInt64(addr + 8); return decoder.decode(new DataView(this._inst.exports.mem.buffer, saddr, len)); }; const timeOrigin = Date.now() - performance.now(); this.importObject = { gojs: { // Go's SP does not change as long as no Go code is running. Some operations (e.g. calls, getters and setters) // may synchronously trigger a Go event handler. This makes Go code get executed in the middle of the imported // function. A goroutine can switch to a new stack if the current stack is too small (see morestack function). // This changes the SP, thus we have to update the SP used by the imported function. // func wasmExit(code int32) 'runtime.wasmExit': (sp) => { sp >>>= 0; const code = this.mem.getInt32(sp + 8, true); this.exited = true; delete this._inst; delete this._values; delete this._goRefCounts; delete this._ids; delete this._idPool; this.exit(code); }, // func wasmWrite(fd uintptr, p unsafe.Pointer, n int32) 'runtime.wasmWrite': (sp) => { sp >>>= 0; const fd = getInt64(sp + 8); const p = getInt64(sp + 16); const n = this.mem.getInt32(sp + 24, true); fs.writeSync(fd, new Uint8Array(this._inst.exports.mem.buffer, p, n)); }, // func resetMemoryDataView() 'runtime.resetMemoryDataView': (sp) => { sp >>>= 0; this.mem = new DataView(this._inst.exports.mem.buffer); }, // func nanotime1() int64 'runtime.nanotime1': (sp) => { sp >>>= 0; setInt64(sp + 8, (timeOrigin + performance.now()) * 1000000); }, // func walltime() (sec int64, nsec int32) 'runtime.walltime': (sp) => { sp >>>= 0; const msec = new Date().getTime(); setInt64(sp + 8, msec / 1000); this.mem.setInt32(sp + 16, (msec % 1000) * 1000000, true); }, // func scheduleTimeoutEvent(delay int64) int32 'runtime.scheduleTimeoutEvent': (sp) => { sp 
>>>= 0; const id = this._nextCallbackTimeoutID; this._nextCallbackTimeoutID++; this._scheduledTimeouts.set( id, setTimeout( () => { this._resume(); while (this._scheduledTimeouts.has(id)) { // for some reason Go failed to register the timeout event, log and try again // (temporary workaround for https://github.com/golang/go/issues/28975) console.warn('scheduleTimeoutEvent: missed timeout event'); this._resume(); } }, getInt64(sp + 8) + 1 // setTimeout has been seen to fire up to 1 millisecond early ) ); this.mem.setInt32(sp + 16, id, true); }, // func clearTimeoutEvent(id int32) 'runtime.clearTimeoutEvent': (sp) => { sp >>>= 0; const id = this.mem.getInt32(sp + 8, true); clearTimeout(this._scheduledTimeouts.get(id)); this._scheduledTimeouts.delete(id); }, // func getRandomData(r []byte) 'runtime.getRandomData': (sp) => { sp >>>= 0; globalThis.crypto.getRandomValues(loadSlice(sp + 8)); }, // func finalizeRef(v ref) 'syscall/js.finalizeRef': (sp) => { sp >>>= 0; const id = this.mem.getUint32(sp + 8, true); this._goRefCounts[id]--; if (this._goRefCounts[id] === 0) { const v = this._values[id]; this._values[id] = null; this._ids.delete(v); this._idPool.push(id); } }, // func stringVal(value string) ref 'syscall/js.stringVal': (sp) => { sp >>>= 0; storeValue(sp + 24, loadString(sp + 8)); }, // func valueGet(v ref, p string) ref 'syscall/js.valueGet': (sp) => { sp >>>= 0; const result = Reflect.get(loadValue(sp + 8), loadString(sp + 16)); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 32, result); }, // func valueSet(v ref, p string, x ref) 'syscall/js.valueSet': (sp) => { sp >>>= 0; Reflect.set(loadValue(sp + 8), loadString(sp + 16), loadValue(sp + 32)); }, // func valueDelete(v ref, p string) 'syscall/js.valueDelete': (sp) => { sp >>>= 0; Reflect.deleteProperty(loadValue(sp + 8), loadString(sp + 16)); }, // func valueIndex(v ref, i int) ref 'syscall/js.valueIndex': (sp) => { sp >>>= 0; storeValue(sp + 24, Reflect.get(loadValue(sp + 8), 
getInt64(sp + 16))); }, // valueSetIndex(v ref, i int, x ref) 'syscall/js.valueSetIndex': (sp) => { sp >>>= 0; Reflect.set(loadValue(sp + 8), getInt64(sp + 16), loadValue(sp + 24)); }, // func valueCall(v ref, m string, args []ref) (ref, bool) 'syscall/js.valueCall': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); const m = Reflect.get(v, loadString(sp + 16)); const args = loadSliceOfValues(sp + 32); const result = Reflect.apply(m, v, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 56, result); this.mem.setUint8(sp + 64, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 56, err); this.mem.setUint8(sp + 64, 0); } }, // func valueInvoke(v ref, args []ref) (ref, bool) 'syscall/js.valueInvoke': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); const args = loadSliceOfValues(sp + 16); const result = Reflect.apply(v, undefined, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, result); this.mem.setUint8(sp + 48, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, err); this.mem.setUint8(sp + 48, 0); } }, // func valueNew(v ref, args []ref) (ref, bool) 'syscall/js.valueNew': (sp) => { sp >>>= 0; try { const v = loadValue(sp + 8); const args = loadSliceOfValues(sp + 16); const result = Reflect.construct(v, args); sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, result); this.mem.setUint8(sp + 48, 1); } catch (err) { sp = this._inst.exports.getsp() >>> 0; // see comment above storeValue(sp + 40, err); this.mem.setUint8(sp + 48, 0); } }, // func valueLength(v ref) int 'syscall/js.valueLength': (sp) => { sp >>>= 0; setInt64(sp + 16, Number.parseInt(loadValue(sp + 8).length)); }, // valuePrepareString(v ref) (ref, int) 'syscall/js.valuePrepareString': (sp) => { sp >>>= 0; const str = encoder.encode(String(loadValue(sp + 8))); storeValue(sp + 16, str); setInt64(sp + 24, 
str.length); }, // valueLoadString(v ref, b []byte) 'syscall/js.valueLoadString': (sp) => { sp >>>= 0; const str = loadValue(sp + 8); loadSlice(sp + 16).set(str); }, // func valueInstanceOf(v ref, t ref) bool 'syscall/js.valueInstanceOf': (sp) => { sp >>>= 0; this.mem.setUint8(sp + 24, loadValue(sp + 8) instanceof loadValue(sp + 16) ? 1 : 0); }, // func copyBytesToGo(dst []byte, src ref) (int, bool) 'syscall/js.copyBytesToGo': (sp) => { sp >>>= 0; const dst = loadSlice(sp + 8); const src = loadValue(sp + 32); if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) { this.mem.setUint8(sp + 48, 0); return; } const toCopy = src.subarray(0, dst.length); dst.set(toCopy); setInt64(sp + 40, toCopy.length); this.mem.setUint8(sp + 48, 1); }, // func copyBytesToJS(dst ref, src []byte) (int, bool) 'syscall/js.copyBytesToJS': (sp) => { sp >>>= 0; const dst = loadValue(sp + 8); const src = loadSlice(sp + 16); if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) { this.mem.setUint8(sp + 48, 0); return; } const toCopy = src.subarray(0, dst.length); dst.set(toCopy); setInt64(sp + 40, toCopy.length); this.mem.setUint8(sp + 48, 1); }, debug: (value) => { console.log(value); }, }, }; } async run(instance) { if (!(instance instanceof WebAssembly.Instance)) { throw new Error('Go.run: WebAssembly.Instance expected'); } this._inst = instance; this.mem = new DataView(this._inst.exports.mem.buffer); this._values = [ // JS values that Go currently has references to, indexed by reference id Number.NaN, 0, null, true, false, globalThis, this, ]; this._goRefCounts = new Array(this._values.length).fill(Number.POSITIVE_INFINITY); // number of references that Go has to a JS value, indexed by reference id this._ids = new Map([ // mapping from JS values to reference ids [0, 1], [null, 2], [true, 3], [false, 4], [globalThis, 5], [this, 6], ]); this._idPool = []; // unused ids that have been garbage collected this.exited = false; // whether the Go program has exited // 
Pass command line arguments and environment variables to WebAssembly by writing them to the linear memory. let offset = 4096; const strPtr = (str) => { const ptr = offset; const bytes = encoder.encode(`${str}\0`); new Uint8Array(this.mem.buffer, offset, bytes.length).set(bytes); offset += bytes.length; if (offset % 8 !== 0) { offset += 8 - (offset % 8); } return ptr; }; const argc = this.argv.length; const argvPtrs = []; this.argv.forEach((arg) => { argvPtrs.push(strPtr(arg)); }); argvPtrs.push(0); const keys = Object.keys(this.env).sort(); keys.forEach((key) => { argvPtrs.push(strPtr(`${key}=${this.env[key]}`)); }); argvPtrs.push(0); const argv = offset; argvPtrs.forEach((ptr) => { this.mem.setUint32(offset, ptr, true); this.mem.setUint32(offset + 4, 0, true); offset += 8; }); this._inst.exports.run(argc, argv); if (this.exited) { this._resolveExitPromise(); } await this._exitPromise; } private _resume() { if (this.exited) { throw new Error('Go program has already exited'); } this._inst.exports.resume(); if (this.exited) { this._resolveExitPromise(); } } private _makeFuncWrapper(id) { const go = this; return function () { const event = { id: id, this: this, args: arguments }; go._pendingEvent = event; go._resume(); return event.result; }; } } ================================================ FILE: packages/compiler/src/shared/ast.ts ================================================ export type ParentNode = | RootNode | ElementNode | ComponentNode | CustomElementNode | FragmentNode | ExpressionNode; export type LiteralNode = TextNode | DoctypeNode | CommentNode | FrontmatterNode; export type Node = | RootNode | ElementNode | ComponentNode | CustomElementNode | FragmentNode | ExpressionNode | TextNode | FrontmatterNode | DoctypeNode | CommentNode; export interface Position { start: Point; end?: Point; } export interface Point { /** 1-based line number */ line: number; /** 1-based column number, per-line */ column: number; /** 0-based byte offset */ offset: number; } 
export interface BaseNode { type: string; position?: Position; } export interface ParentLikeNode extends BaseNode { type: 'element' | 'component' | 'custom-element' | 'fragment' | 'expression' | 'root'; children: Node[]; } export interface ValueNode extends BaseNode { value: string; } export interface RootNode extends ParentLikeNode { type: 'root'; } export interface AttributeNode extends BaseNode { type: 'attribute'; kind: 'quoted' | 'empty' | 'expression' | 'spread' | 'shorthand' | 'template-literal'; name: string; value: string; raw?: string; } export interface TextNode extends ValueNode { type: 'text'; } export interface ElementNode extends ParentLikeNode { type: 'element'; name: string; attributes: AttributeNode[]; } export interface FragmentNode extends ParentLikeNode { type: 'fragment'; name: string; attributes: AttributeNode[]; } export interface ComponentNode extends ParentLikeNode { type: 'component'; name: string; attributes: AttributeNode[]; } export interface CustomElementNode extends ParentLikeNode { type: 'custom-element'; name: string; attributes: AttributeNode[]; } export type TagLikeNode = ElementNode | FragmentNode | ComponentNode | CustomElementNode; export interface DoctypeNode extends ValueNode { type: 'doctype'; } export interface CommentNode extends ValueNode { type: 'comment'; } export interface FrontmatterNode extends ValueNode { type: 'frontmatter'; } export interface ExpressionNode extends ParentLikeNode { type: 'expression'; } ================================================ FILE: packages/compiler/src/shared/diagnostics.ts ================================================ export enum DiagnosticCode { ERROR = 1000, ERROR_UNTERMINATED_JS_COMMENT = 1001, ERROR_FRAGMENT_SHORTHAND_ATTRS = 1002, ERROR_UNMATCHED_IMPORT = 1003, ERROR_UNSUPPORTED_SLOT_ATTRIBUTE = 1004, WARNING = 2000, WARNING_UNTERMINATED_HTML_COMMENT = 2001, WARNING_UNCLOSED_HTML_TAG = 2002, WARNING_DEPRECATED_DIRECTIVE = 2003, WARNING_IGNORED_DIRECTIVE = 2004, 
WARNING_UNSUPPORTED_EXPRESSION = 2005, WARNING_SET_WITH_CHILDREN = 2006, INFO = 3000, HINT = 4000, } ================================================ FILE: packages/compiler/src/shared/types.ts ================================================ import type { RootNode } from './ast.js'; import type { DiagnosticCode } from './diagnostics.js'; export type * from './ast.js'; export interface PreprocessorResult { code: string; map?: string; } export interface PreprocessorError { error: string; } export interface ParseOptions { position?: boolean; } export enum DiagnosticSeverity { Error = 1, Warning = 2, Information = 3, Hint = 4, } export interface DiagnosticMessage { severity: DiagnosticSeverity; code: DiagnosticCode; location: DiagnosticLocation; hint?: string; text: string; } export interface DiagnosticLocation { file: string; // 1-based line: number; // 1-based column: number; length: number; } export interface TransformOptions { internalURL?: string; filename?: string; normalizedFilename?: string; sourcemap?: boolean | 'inline' | 'external' | 'both'; astroGlobalArgs?: string; compact?: boolean; resultScopedSlot?: boolean; scopedStyleStrategy?: 'where' | 'class' | 'attribute'; /** * @deprecated "as" has been removed and no longer has any effect! 
*/ as?: 'document' | 'fragment'; transitionsAnimationURL?: string; resolvePath?: (specifier: string) => Promise<string> | string; preprocessStyle?: ( content: string, attrs: Record<string, string> ) => null | Promise<PreprocessorResult | PreprocessorError>; annotateSourceFile?: boolean; } export type ConvertToTSXOptions = Pick< TransformOptions, 'filename' | 'normalizedFilename' | 'sourcemap' > & { /** If set to true, script tags content will be included in the generated TSX * Scripts will be wrapped in an arrow function to be compatible with JSX's spec */ includeScripts?: boolean; /** If set to true, style tags content will be included in the generated TSX * Styles will be wrapped in a template literal to be compatible with JSX's spec */ includeStyles?: boolean; }; export type HoistedScript = { type: string } & ( | { type: 'external'; src: string; } | { type: 'inline'; code: string; map: string; } ); export interface HydratedComponent { exportName: string; localName: string; specifier: string; resolvedPath: string; } export interface TransformResult { code: string; map: string; scope: string; styleError: string[]; diagnostics: DiagnosticMessage[]; css: string[]; scripts: HoistedScript[]; hydratedComponents: HydratedComponent[]; clientOnlyComponents: HydratedComponent[]; serverComponents: HydratedComponent[]; containsHead: boolean; propagation: boolean; } export interface SourceMap { file: string; mappings: string; names: string[]; sources: string[]; sourcesContent: string[]; version: number; } /** * Represents a location in a TSX file. * Both the `start` and `end` properties are 0-based, and are based off utf-16 code units. (i.e. 
JavaScript's `String.prototype.length`) */ export interface TSXLocation { start: number; end: number; } export interface TSXExtractedTag { position: TSXLocation; content: string; } export interface TSXExtractedScript extends TSXExtractedTag { type: 'processed-module' | 'module' | 'inline' | 'event-attribute' | 'json' | 'raw' | 'unknown'; } export interface TSXExtractedStyle extends TSXExtractedTag { type: 'tag' | 'style-attribute'; lang: | 'css' | 'scss' | 'sass' | 'less' | 'stylus' | 'styl' | 'postcss' | 'pcss' | 'unknown' | (string & {}); } export interface TSXResult { code: string; map: SourceMap; diagnostics: DiagnosticMessage[]; metaRanges: { frontmatter: TSXLocation; body: TSXLocation; scripts?: TSXExtractedScript[]; styles?: TSXExtractedStyle[]; }; } export interface ParseResult { ast: RootNode; diagnostics: DiagnosticMessage[]; } // This function transforms a single JavaScript file. It can be used to minify // JavaScript, convert TypeScript/JSX to JavaScript, or convert newer JavaScript // to older JavaScript. It returns a promise that is either resolved with a // "TransformResult" object or rejected with a "TransformFailure" object. // // Works in node: yes // Works in browser: yes export declare function transform( input: string, options?: TransformOptions ): Promise<TransformResult>; export declare function parse(input: string, options?: ParseOptions): Promise<ParseResult>; export declare function convertToTSX( input: string, options?: ConvertToTSXOptions ): Promise<TSXResult>; // This configures the browser-based version of astro. It is necessary to // call this first and wait for the returned promise to be resolved before // making other API calls when using astro in the browser. // // Works in node: yes // Works in browser: yes ("options" is required) export declare function initialize(options: InitializeOptions): Promise<void>; /** * When calling the core compiler APIs, e.g. 
`transform`, `parse`, etc, they * would automatically instantiate a WASM instance to process the input. When * done, you can call this to manually teardown the WASM instance. * * If the APIs are called again, they will automatically instantiate a new WASM * instance. In browsers, you have to call `initialize()` again before using the APIs. * * Note: Calling teardown is optional and exists mostly as an optimization only. */ export declare function teardown(): void; export interface InitializeOptions { // The URL of the "astro.wasm" file. This must be provided when running // astro in the browser. wasmURL?: string; } ================================================ FILE: packages/compiler/sync.d.ts ================================================ export * from './dist/node/sync.js'; ================================================ FILE: packages/compiler/test/bad-styles/sass.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <style lang="scss"> article:global(:is(h1, h2, h3, h4, h5, h6):hover { color: purple; } </style> <style lang="scss"> article:is(h1, h2, h3, h4, h5, h6)):hover { color: purple; } </style> `; test('it works', async () => { const result = await transform(FIXTURE, { filename: '/users/astro/apps/pacman/src/pages/index.astro', async preprocessStyle() { return { error: new Error('Unable to convert').message, }; }, }); assert.equal(result.styleError.length, 2); assert.equal(result.styleError[0], 'Unable to convert'); }); test.run(); ================================================ FILE: packages/compiler/test/bad-styles/unclosed-style.ts ================================================ import { type ParseResult, parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { ElementNode } from '../../types.js'; test('can compile unfinished style', async () => { let error = 0; 
let result: ParseResult = {} as ParseResult; try { result = await parse('<style>'); } catch (e) { error = 1; } const style = result.ast.children[0] as ElementNode; assert.equal(error, 0, 'Expected to compile with unfinished style.'); assert.ok(result.ast, 'Expected to compile with unfinished style.'); assert.equal(style.name, 'style', 'Expected to compile with unfinished style.'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/body-after-head-component.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- const isProd = true; --- <!DOCTYPE html> <html lang="en"> <head> {isProd && <TestHead />} <title>document</title> {isProd && <slot />} </head> <body style="color: red;"> <main> <h1>Welcome to <span class="text-gradient">Astro</span></h1> </main> </body> </html> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('has body in output', () => { assert.match( result.code, '<body style="color: red;">', 'Expected output to contain body element!' 
); }); test.run(); ================================================ FILE: packages/compiler/test/basic/body-expression.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- const slugs = ['one', 'two', 'three']; --- <html> <head></head> <body> {slugs.map((slug) => ( <a href={\`/post/\${slug}\`}>{slug}</a> ))} </body> </html> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('can compile body expression', () => { assert.ok(result.code, 'Expected to compile body expression!'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/comment.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- /***/ --- <div /> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('Can handle multi-* comments', () => { assert.ok(result.code, 'Expected to compile'); assert.equal(result.diagnostics.length, 0, 'Expected no diagnostics'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/component-metadata/index.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- import One from '../components/one.jsx'; import * as Two from '../components/two.jsx'; import { Three } from '../components/three.tsx'; import * as four from '../components/four.jsx'; import * as Five from '../components/five.jsx'; import { Six } from '../components/six.jsx'; import Seven from '../components/seven.jsx'; import * as eight from '../components/eight.jsx'; --- <One
client:load /> <Two.someName client:load /> <Three client:load /> <four.nested.deep.Component client:load /> <!-- client only tests --> <Five.someName client:only /> <Six client:only /> <Seven client:only /> <eight.nested.deep.Component client:only /> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/users/astro/apps/pacman/src/pages/index.astro', }); }); test('Hydrated component', () => { const components = result.hydratedComponents; assert.equal(components.length, 4); }); test('Hydrated components: default export', () => { const components = result.hydratedComponents; assert.equal(components[0].exportName, 'default'); assert.equal(components[0].specifier, '../components/one.jsx'); assert.equal(components[0].resolvedPath, '/users/astro/apps/pacman/src/components/one.jsx'); }); test('Hydrated components: star export', () => { const components = result.hydratedComponents; assert.equal(components[1].exportName, 'someName'); assert.equal(components[1].specifier, '../components/two.jsx'); assert.equal(components[1].resolvedPath, '/users/astro/apps/pacman/src/components/two.jsx'); }); test('Hydrated components: named export', () => { const components = result.hydratedComponents; assert.equal(components[2].exportName, 'Three'); assert.equal(components[2].specifier, '../components/three.tsx'); assert.equal(components[2].resolvedPath, '/users/astro/apps/pacman/src/components/three.tsx'); }); test('Hydrated components: deep nested export', () => { const components = result.hydratedComponents; assert.equal(components[3].exportName, 'nested.deep.Component'); assert.equal(components[3].specifier, '../components/four.jsx'); assert.equal(components[3].resolvedPath, '/users/astro/apps/pacman/src/components/four.jsx'); }); test('ClientOnly component', () => { const components = result.clientOnlyComponents; assert.equal(components.length, 4); }); test('ClientOnly components: star export', () => { const components = 
result.clientOnlyComponents; assert.equal(components[0].exportName, 'someName'); assert.equal(components[0].specifier, '../components/five.jsx'); assert.equal(components[0].resolvedPath, '/users/astro/apps/pacman/src/components/five.jsx'); }); test('ClientOnly components: named export', () => { const components = result.clientOnlyComponents; assert.equal(components[1].exportName, 'Six'); assert.equal(components[1].specifier, '../components/six.jsx'); assert.equal(components[1].resolvedPath, '/users/astro/apps/pacman/src/components/six.jsx'); }); test('ClientOnly components: default export', () => { const components = result.clientOnlyComponents; assert.equal(components[2].exportName, 'default'); assert.equal(components[2].specifier, '../components/seven.jsx'); assert.equal(components[2].resolvedPath, '/users/astro/apps/pacman/src/components/seven.jsx'); }); test('ClientOnly components: deep nested export', () => { const components = result.clientOnlyComponents; assert.equal(components[3].exportName, 'nested.deep.Component'); assert.equal(components[3].specifier, '../components/eight.jsx'); assert.equal(components[3].resolvedPath, '/users/astro/apps/pacman/src/components/eight.jsx'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/component-name.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = '<div>Hello world!</div>'; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/components/Cool.astro', }); }); test('exports named component', () => { assert.match(result.code, 'export default $$Cool', 'Expected output to contain named export'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/export.ts ================================================ import { transform 
} from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('TypeScript props on newline', async () => { const FIXTURE = `--- export type Props = BaseLayoutProps & Pick<LocaleSelectProps, 'locale' | 'someOtherProp'>; --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, 'BaseLayoutProps &\n Pick<', 'Expected output to contain full Props export' ); }); test('exported type', async () => { const FIXTURE = `--- // this is fine export type NumberType = number; // astro hangs because of this typedef. // comment it out and astro will work fine. export type FuncType = (x: number) => number; --- {new Date()} `; const result = await transform(FIXTURE); assert.match( result.code, 'export type NumberType = number;\nexport type FuncType = (x: number) => number', 'Expected output to contain full Props export' ); }); test.run(); ================================================ FILE: packages/compiler/test/basic/expression-then-node.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- import Show from './Show.astro'; export interface Props<T> { each: Iterable<T>; } const { each } = Astro.props; --- { (async function* () { for await (const value of each) { let html = await Astro.slots.render('default', [value]); yield <Fragment set:html={html} />; yield '\n'; } })() } <Show when={!each.length}> <slot name="fallback" /> </Show> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('expression followed by node', () => { assert.match( result.code, `yield ' '; }`, 'Expected output to properly handle expression!' 
); }); test.run(); ================================================ FILE: packages/compiler/test/basic/expressions.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('Can handle < inside JSX expression', async () => { const input = `<Layout> { new Array(totalPages).fill(0).map((_, index) => { const active = currentPage === index; if ( totalPages > 25 && ( index < currentPage - offset || index > currentPage + offset) ) { return 'HAAAA'; } }) } </Layout> `; const output = await transform(input); assert.ok(output.code, 'Expected to compile'); assert.match( output.code, `new Array(totalPages).fill(0).map((_, index) => { const active = currentPage === index; if ( totalPages > 25 && ( index < currentPage - offset || index > currentPage + offset) ) { return 'HAAAA'; } })`, 'Expected expression to be compiled properly' ); assert.equal(output.diagnostics.length, 0, 'Expected no diagnostics'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/fragment.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- import ThemeToggleButton from './ThemeToggleButton.tsx'; --- <title>Uhhh</title> <body><div>Hello!</div></body> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('can compile fragment', () => { assert.not.match(result.code, '<head>', 'Expected output not to contain <head>'); assert.match( result.code, '<body><div>Hello!</div></body>', 'Expected output to contain <body><div>Hello!</div></body>' ); }); test.run(); ================================================ FILE: packages/compiler/test/basic/get-static-paths.ts ================================================ import { transform } from 
'@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('getStaticPaths with braces on newline', async () => { const FIXTURE = `--- import A from './A.astro'; export async function getStaticPaths() { return [ { params: { id: '1' } }, { params: { id: '2' } }, { params: { id: '3' } } ]; } --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, 'export async function getStaticPaths()\n{', 'Expected output to contain getStaticPaths output' ); }); test('getStaticPaths with braces on newline and destructured params', async () => { const FIXTURE = `--- import A from './A.astro'; export async function getStaticPaths({ paginate }) { return [ { params: { id: '1' } }, { params: { id: '2' } }, { params: { id: '3' } } ]; } --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, 'export async function getStaticPaths({ paginate })\n{', 'Expected output to contain getStaticPaths output' ); }); test('getStaticPaths as const without braces', async () => { const FIXTURE = `--- import A from './A.astro'; export const getStaticPaths = () => ([ { params: { id: '1' } }, { params: { id: '2' } }, { params: { id: '3' } } ]) --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, 'export const getStaticPaths = () => ([', 'Expected output to contain getStaticPaths output' ); }); test('getStaticPaths as const with braces on newline', async () => { const FIXTURE = `--- import A from './A.astro'; export const getStaticPaths = () => { return [ { params: { id: '1' } }, { params: { id: '2' } }, { params: { id: '3' } } ]; } --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, 'export const getStaticPaths = () =>\n{', 'Expected output to contain getStaticPaths output' ); }); test('getStaticPaths with whitespace', async () => { const FIXTURE = `--- export const getStaticPaths = async () => { const content = await 
Astro.glob('../content/*.mdx'); return content .filter((page) => !page.frontmatter.draft) // skip drafts .map(({ default: MdxContent, frontmatter, url, file }) => { return { params: { slug: frontmatter.slug || "index" }, props: { MdxContent, file, frontmatter, url } } }) } const { MdxContent, frontmatter, url, file } = Astro.props; --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, '\nconst $$stdin = ', 'Expected getStaticPaths hoisting to maintain newlines' ); }); test('getStaticPaths with types', async () => { const FIXTURE = `--- export async function getStaticPaths({ paginate, }: { paginate: PaginateFunction; }) { const allPages = ( await getCollection( "blog" ) ); return paginate(allPages, { pageSize: 10 }); } --- <div></div> `; const result = await transform(FIXTURE); assert.match( result.code, '{\n paginate: PaginateFunction;\n}) {', 'Expected output to contain getStaticPaths output' ); }); test.run(); ================================================ FILE: packages/compiler/test/basic/head-injection.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = '<html><head><title>Ah</title></head></html>'; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('head injection', () => { assert.match( result.code, '$$renderHead($$result)', 'Expected output to contain $$renderHead($$result) injection point' ); }); test.run(); ================================================ FILE: packages/compiler/test/basic/lt-gt-text.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- // Component Imports import MainHead from '../components/MainHead.astro'; import Nav from '../components/Nav.astro'; import 
Footer from '../components/Footer.astro'; import PortfolioPreview from '../components/PortfolioPreview.astro'; // Data Fetching: List all Markdown posts in the repo. const projects = await Astro.glob('./project/**/*.md'); const featuredProject = projects[0]; // Full Astro Component Syntax: // https://docs.astro.build/core-concepts/astro-components/ --- <html lang="en"> <head> <MainHead title="Jeanine White: Personal Site" description="Jeanine White: Developer, Speaker, and Writer..." /> </head> <body> <Nav /> <small>< header ></small> <Footer /> </body> </html> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('< and > as raw text', () => { assert.ok(result.code, 'Expected to compile'); assert.equal(result.diagnostics.length, 0, 'Expected no diagnostics'); assert.match(result.code, '< header >', 'Expected output to contain < header >'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/null-chars.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <div xmlns:happy="https://example.com/schemas/happy"> <img src="jolly.avif" happy:smile="sweet"/> </div> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/Users/matthew/dev/astro/packages/astro/test/fixtures/astro-attrs/src/pages/namespaced.astro', sourcemap: 'both', }); }); test('Includes null characters', () => { assert.not.match(result.code, '\x00', 'Corrupted output'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/props-interface.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- // 
eslint-disable-next-line no-undef export interface Props extends astroHTML.JSX.HTMLAttributes {} const props = { ...Astro.props } as Props; --- <body class:list={props['class:list']}> <slot></slot> </body>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('retains newlines around comment', () => { assert.ok(result.code, 'Expected to compile'); assert.match(result.code, /\/\/ eslint-disable-next-line no-undef\n/g); assert.equal(result.diagnostics.length, 0, 'Expected no diagnostics'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/script-before-html.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <script> // anything ... </script> <!DOCTYPE html> <html lang="de"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width" /> <link rel="icon" type="image/svg+xml" href="/favicon.svg" /> <meta name="generator" content={Astro.generator} /> <title>Astro strips html lang tag</title> </head> <body> <main> <slot /> </main> </body> </html> <style lang="scss" is:global> html { scroll-behavior: smooth; } </style> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('includes html element', () => { assert.ok( result.code.includes('<html lang="de">'), 'Expected compile result to include html element!' 
); }); test.run(); ================================================ FILE: packages/compiler/test/basic/script-fragment.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `<script src={Astro.resolve("../scripts/no_hoist_nonmodule.js")}></script>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('script fragment', () => { assert.ok(result.code, 'Can compile script fragment'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/top-level-expressions.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- const { items, emptyItems } = Astro.props; const internal = []; --- <!-- False --> {false && ( <span id="frag-false" /> )} <!-- Null --> {null && ( <span id="frag-null" /> )} <!-- True --> {true && ( <span id="frag-true" /> )} <!-- Undefined --> {false && (<span id="frag-undefined" />)} `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('top-level expressions', () => { assert.ok(result.code, 'Can compile top-level expressions'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/trailing-newline.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `{ node.shouldRenderChildren() ? ( // IMPORTANT - DO NOT SELF CLOSE THIS TAG. ASTRO FREAKS OUT. <Fragment set:html={children}></Fragment> ) : node.shouldRenderSelf() ? 
( // @ts-ignore content.map((element) => { return <Astro.self content={element} components={components} />; }) ) : node.shouldRenderTag() ? ( <Tag {...props}> {node.hasChildren() ? ( <Astro.self content={children} components={components} /> ) : null} </Tag> ) : null } `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('does not add trailing newline to rendered output', () => { assert.match( result.code, `}\`;\n}, '<stdin>', undefined);`, 'Does not include a trailing newline in the render function' ); }); test.run(); ================================================ FILE: packages/compiler/test/basic/trailing-space.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- import { Markdown } from 'astro/components'; import Layout from '../layouts/content.astro'; --- <style> #root { color: green; } </style> <Layout> <div id="root"> <Markdown is:raw> ## Interesting Topic </Markdown> </div> </Layout>`; // NOTE: the lack of trailing space is important to this test! 
let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('trailing space', () => { assert.ok(result.code, 'Expected to compiler'); assert.not.match(result.code, 'html', 'Expected output to not contain <html>'); }); test.run(); ================================================ FILE: packages/compiler/test/basic/trailing-spaces-ii.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- --- <span class="spoiler"> <slot /> </span> <style> span { color: red; } </style> <script> console.log("hello") </script> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('trailing space', () => { assert.ok(result.code, 'Expected to compiler'); assert.match( result.code, `<span class="spoiler astro-bqati2k5"> \${$$renderSlot($$result,$$slots["default"])} </span> \${$$renderScript($$result,"<stdin>?astro&type=script&index=0&lang.ts")}\`` ); }); test.run(); ================================================ FILE: packages/compiler/test/client-directive/special-characters.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- import CaretCounter from '../components/^--with-carets/Counter'; import RocketCounter from '../components/and-rockets-🚀/Counter'; import PercentCounter from '../components/now-100%-better/Counter'; import SpaceCounter from '../components/with some spaces/Counter'; import RoundBracketCounter from '../components/with-(round-brackets)/Counter'; import SquareBracketCounter from '../components/with-[square-brackets]/Counter'; import RemoteComponent from 'https://test.com/components/with-[wacky-brackets}()10%-cooler/Counter'; --- <html> <body> <h1>Special chars in component import 
paths from an .astro file</h1> <CaretCounter id="caret" client:visible /> <RocketCounter id="rocket" client:visible /> <PercentCounter id="percent" client:visible /> <SpaceCounter id="space" client:visible /> <RoundBracketCounter id="round-bracket" client:visible /> <SquareBracketCounter id="square-bracket" client:visible /> <RemoteComponent id="remote-component" client:visible /> </body> </html> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/users/astro/apps/pacman/src/pages/index.astro' }); }); test('does not panic', () => { assert.ok(result.code); }); test('hydrated components with carets', () => { const components = result.hydratedComponents; assert.equal(components[0].exportName, 'default'); assert.equal(components[0].specifier, '../components/^--with-carets/Counter'); assert.equal( components[0].resolvedPath, '/users/astro/apps/pacman/src/components/^--with-carets/Counter' ); }); test('hydrated components with rockets', () => { const components = result.hydratedComponents; assert.equal(components[1].exportName, 'default'); assert.equal(components[1].specifier, '../components/and-rockets-🚀/Counter'); assert.equal( components[1].resolvedPath, '/users/astro/apps/pacman/src/components/and-rockets-🚀/Counter' ); }); test('hydrated components with percent', () => { const components = result.hydratedComponents; assert.equal(components[2].exportName, 'default'); assert.equal(components[2].specifier, '../components/now-100%-better/Counter'); assert.equal( components[2].resolvedPath, '/users/astro/apps/pacman/src/components/now-100%-better/Counter' ); }); test('hydrated components with spaces', () => { const components = result.hydratedComponents; assert.equal(components[3].exportName, 'default'); assert.equal(components[3].specifier, '../components/with some spaces/Counter'); assert.equal( components[3].resolvedPath, '/users/astro/apps/pacman/src/components/with some spaces/Counter' ); }); test('hydrated 
components with round brackets', () => { const components = result.hydratedComponents; assert.equal(components[4].exportName, 'default'); assert.equal(components[4].specifier, '../components/with-(round-brackets)/Counter'); assert.equal( components[4].resolvedPath, '/users/astro/apps/pacman/src/components/with-(round-brackets)/Counter' ); }); test('hydrated components with square brackets', () => { const components = result.hydratedComponents; assert.equal(components[5].exportName, 'default'); assert.equal(components[5].specifier, '../components/with-[square-brackets]/Counter'); assert.equal( components[5].resolvedPath, '/users/astro/apps/pacman/src/components/with-[square-brackets]/Counter' ); }); test('hydrated components with kitchen-sink', () => { const components = result.hydratedComponents; assert.equal(components[6].exportName, 'default'); assert.equal( components[6].specifier, 'https://test.com/components/with-[wacky-brackets}()10%-cooler/Counter' ); assert.equal( components[6].resolvedPath, 'https://test.com/components/with-[wacky-brackets}()10%-cooler/Counter' ); }); test.run(); ================================================ FILE: packages/compiler/test/client-directive/warn.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <script client:load></script> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE); }); test('reports a warning for using a client directive', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 2); assert.equal(result.diagnostics[0].severity, 2); assert.match(result.diagnostics[0].text, 'does not need the client:load directive'); }); test.run(); ================================================ FILE: packages/compiler/test/compact/minify.ts ================================================ import { 
transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; async function minify(input: string) { const code = (await transform(input, { compact: true })).code; return code.replace('${$$maybeRenderHead($$result)}', ''); } test('basic', async () => { assert.match( await minify(' <div>Hello {value}!</div> '), '$$render`<div>Hello ${value}!</div>`' ); assert.match( await minify(' <div> Hello {value}! </div> '), '$$render`<div> Hello ${value}! </div>`' ); }); test('preservation', async () => { assert.match(await minify('<pre> ! </pre>'), '$$render`<pre> ! </pre>`'); assert.match(await minify('<div is:raw> ! </div>'), '$$render`<div> ! </div>`'); assert.match(await minify('<Markdown is:raw> ! </Markdown>'), '$$render` ! `'); }); test('collapsing', async () => { assert.match(await minify('<span> inline </span>'), '$$render`<span> inline </span>`'); assert.match( await minify('<span>\n inline \t{\t expression \t}</span>'), '$$render`<span>\ninline ${expression}</span>`' ); assert.match( await minify('<span> inline { expression }</span>'), '$$render`<span> inline ${expression}</span>`' ); }); test('space normalization between attributes', async () => { assert.match(await minify('<p title="bar">foo</p>'), '<p title="bar">foo</p>'); assert.match(await minify('<img src="test"/>'), '<img src="test">'); assert.match(await minify('<p title = "bar">foo</p>'), '<p title="bar">foo</p>'); assert.match(await minify('<p title\n\n\t =\n "bar">foo</p>'), '<p title="bar">foo</p>'); assert.match(await minify('<img src="test" \n\t />'), '<img src="test">'); assert.match( await minify('<input title="bar" id="boo" value="hello world">'), '<input title="bar" id="boo" value="hello world">' ); }); test('space normalization around text', async () => { assert.match(await minify(' <p>blah</p>\n\n\n '), '<p>blah</p>'); assert.match(await minify('<p>foo <img> bar</p>'), '<p>foo <img> bar</p>'); assert.match(await minify('<p>foo<img>bar</p>'), 
'<p>foo<img>bar</p>'); assert.match(await minify('<p>foo <img>bar</p>'), '<p>foo <img>bar</p>'); assert.match(await minify('<p>foo<img> bar</p>'), '<p>foo<img> bar</p>'); assert.match(await minify('<p>foo <wbr> bar</p>'), '<p>foo <wbr> bar</p>'); assert.match(await minify('<p>foo<wbr>bar</p>'), '<p>foo<wbr>bar</p>'); assert.match(await minify('<p>foo <wbr>bar</p>'), '<p>foo <wbr>bar</p>'); assert.match(await minify('<p>foo<wbr> bar</p>'), '<p>foo<wbr> bar</p>'); assert.match(await minify('<p>foo <wbr baz moo=""> bar</p>'), '<p>foo <wbr baz moo=""> bar</p>'); assert.match(await minify('<p>foo<wbr baz moo="">bar</p>'), '<p>foo<wbr baz moo="">bar</p>'); assert.match(await minify('<p>foo <wbr baz moo="">bar</p>'), '<p>foo <wbr baz moo="">bar</p>'); assert.match(await minify('<p>foo<wbr baz moo=""> bar</p>'), '<p>foo<wbr baz moo=""> bar</p>'); assert.match( await minify('<p> <a href="#"> <code>foo</code></a> bar</p>'), '<p> <a href="#"> <code>foo</code></a> bar</p>' ); assert.match( await minify('<p><a href="#"><code>foo </code></a> bar</p>'), '<p><a href="#"><code>foo </code></a> bar</p>' ); assert.match( await minify('<p> <a href="#"> <code> foo</code></a> bar </p>'), '<p> <a href="#"> <code> foo</code></a> bar </p>' ); assert.match( await minify('<div> Empty <!-- or --> not </div>'), '<div> Empty <!-- or --> not </div>' ); assert.match( await minify('<div> a <input><!-- b --> c </div>'), '<div> a <input><!-- b --> c </div>' ); await Promise.all( [ 'a', 'abbr', 'acronym', 'b', 'big', 'del', 'em', 'font', 'i', 'ins', 'kbd', 'mark', 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', 'time', 'tt', 'u', 'var', ].map(async (el) => { const [open, close] = [`<${el}>`, `</${el}>`]; assert.match(await minify(`foo ${open}baz${close} bar`), `foo ${open}baz${close} bar`); assert.match(await minify(`foo${open}baz${close}bar`), `foo${open}baz${close}bar`); assert.match(await minify(`foo ${open}baz${close}bar`), `foo ${open}baz${close}bar`); assert.match(await 
minify(`foo${open}baz${close} bar`), `foo${open}baz${close} bar`); assert.match(await minify(`foo ${open} baz ${close} bar`), `foo ${open} baz ${close} bar`); assert.match(await minify(`foo${open} baz ${close}bar`), `foo${open} baz ${close}bar`); assert.match(await minify(`foo ${open} baz ${close}bar`), `foo ${open} baz ${close}bar`); assert.match(await minify(`foo${open} baz ${close} bar`), `foo${open} baz ${close} bar`); assert.match( await minify(`<div>foo ${open}baz${close} bar</div>`), `<div>foo ${open}baz${close} bar</div>` ); assert.match( await minify(`<div>foo${open}baz${close}bar</div>`), `<div>foo${open}baz${close}bar</div>` ); assert.match( await minify(`<div>foo ${open}baz${close}bar</div>`), `<div>foo ${open}baz${close}bar</div>` ); assert.match( await minify(`<div>foo${open}baz${close} bar</div>`), `<div>foo${open}baz${close} bar</div>` ); assert.match( await minify(`<div>foo ${open} baz ${close} bar</div>`), `<div>foo ${open} baz ${close} bar</div>` ); assert.match( await minify(`<div>foo${open} baz ${close}bar</div>`), `<div>foo${open} baz ${close}bar</div>` ); assert.match( await minify(`<div>foo ${open} baz ${close}bar</div>`), `<div>foo ${open} baz ${close}bar</div>` ); assert.match( await minify(`<div>foo${open} baz ${close} bar</div>`), `<div>foo${open} baz ${close} bar</div>` ); }) ); // Don't trim whitespace around element, but do trim within await Promise.all( ['bdi', 'bdo', 'button', 'cite', 'code', 'dfn', 'math', 'q', 'rt', 'rtc', 'ruby', 'svg'].map( async (el) => { const [open, close] = [`<${el}>`, `</${el}>`]; assert.match(await minify(`foo ${open}baz${close} bar`), `foo ${open}baz${close} bar`); assert.match(await minify(`foo${open}baz${close}bar`), `foo${open}baz${close}bar`); assert.match(await minify(`foo ${open}baz${close}bar`), `foo ${open}baz${close}bar`); assert.match(await minify(`foo${open}baz${close} bar`), `foo${open}baz${close} bar`); assert.match(await minify(`foo ${open} baz ${close} bar`), `foo ${open} baz ${close} 
bar`); assert.match(await minify(`foo${open} baz ${close}bar`), `foo${open} baz ${close}bar`); assert.match(await minify(`foo ${open} baz ${close}bar`), `foo ${open} baz ${close}bar`); assert.match(await minify(`foo${open} baz ${close} bar`), `foo${open} baz ${close} bar`); assert.match( await minify(`<div>foo ${open}baz${close} bar</div>`), `<div>foo ${open}baz${close} bar</div>` ); assert.match( await minify(`<div>foo${open}baz${close}bar</div>`), `<div>foo${open}baz${close}bar</div>` ); assert.match( await minify(`<div>foo ${open}baz${close}bar</div>`), `<div>foo ${open}baz${close}bar</div>` ); assert.match( await minify(`<div>foo${open}baz${close} bar</div>`), `<div>foo${open}baz${close} bar</div>` ); assert.match( await minify(`<div>foo ${open} baz ${close} bar</div>`), `<div>foo ${open} baz ${close} bar</div>` ); assert.match( await minify(`<div>foo${open} baz ${close}bar</div>`), `<div>foo${open} baz ${close}bar</div>` ); assert.match( await minify(`<div>foo ${open} baz ${close}bar</div>`), `<div>foo ${open} baz ${close}bar</div>` ); assert.match( await minify(`<div>foo${open} baz ${close} bar</div>`), `<div>foo${open} baz ${close} bar</div>` ); } ) ); await Promise.all( [ ['<span> foo </span>', '<span> foo </span>'], [' <span> foo </span> ', '<span> foo </span>'], ['<nobr>a</nobr>', '<nobr>a</nobr>'], ['<nobr>a </nobr>', '<nobr>a </nobr>'], ['<nobr> a</nobr>', '<nobr> a</nobr>'], ['<nobr> a </nobr>', '<nobr> a </nobr>'], ['a<nobr>b</nobr>c', 'a<nobr>b</nobr>c'], ['a<nobr>b </nobr>c', 'a<nobr>b </nobr>c'], ['a<nobr> b</nobr>c', 'a<nobr> b</nobr>c'], ['a<nobr> b </nobr>c', 'a<nobr> b </nobr>c'], ['a<nobr>b</nobr> c', 'a<nobr>b</nobr> c'], ['a<nobr>b </nobr> c', 'a<nobr>b </nobr> c'], ['a<nobr> b</nobr> c', 'a<nobr> b</nobr> c'], ['a<nobr> b </nobr> c', 'a<nobr> b </nobr> c'], ['a <nobr>b</nobr>c', 'a <nobr>b</nobr>c'], ['a <nobr>b </nobr>c', 'a <nobr>b </nobr>c'], ['a <nobr> b</nobr>c', 'a <nobr> b</nobr>c'], ['a <nobr> b </nobr>c', 'a <nobr> b </nobr>c'], 
['a <nobr>b</nobr> c', 'a <nobr>b</nobr> c'], ['a <nobr>b </nobr> c', 'a <nobr>b </nobr> c'], ['a <nobr> b</nobr> c', 'a <nobr> b</nobr> c'], ['a <nobr> b </nobr> c', 'a <nobr> b </nobr> c'], ].map(async ([input, output]) => { assert.match(await minify(input), output); }) ); }); test('surrounded by newlines (astro#7401)', async () => { const input = '<span>foo</span>\n\t\tbar\n\t\t<span>baz</span>'; const output = '<span>foo</span>\nbar\n<span>baz</span>'; const result = await minify(input); assert.match(result, output); }); test('separated by newlines (#815)', async () => { const input = '<p>\n\ta\n\t<span>b</span>\n\tc\n</p>'; const output = '<p>\na\n<span>b</span>\nc\n</p>'; const result = await minify(input); assert.match(result, output); }); ================================================ FILE: packages/compiler/test/css-order/astro-styles.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <style> body { color: green; } </style> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: 'test.astro', }); }); test('Astro style imports are included in the compiled JS', () => { const idx = result.code.indexOf('test.astro?astro&type=style&index=0&lang.css'); assert.not.equal(idx, -1); }); test.run(); ================================================ FILE: packages/compiler/test/css-order/imported-styles.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- import '../styles/global.css'; --- <style> body { color: green; } </style> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: 'test.astro', }); }); test('Astro style imports placed after frontmatter imports', () 
=> { const idx1 = result.code.indexOf('../styles/global.css'); const idx2 = result.code.indexOf('test.astro?astro&type=style&index=0&lang.css'); assert.ok(idx2 > idx1); }); test.run(); ================================================ FILE: packages/compiler/test/errors/client-only-unfound.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `--- import * as components from '../components'; const { MyComponent } = components; --- <html> <head> <title>Hello world</title> </head> <body> <MyComponent client:only /> </body> </html>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/components/Cool.astro', }); }); test('got an error because client:only component not found import', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is( result.diagnostics[0].text, 'Unable to find matching import statement for client:only component' ); assert.is( FIXTURE.split('\n')[result.diagnostics[0].location.line - 1], ' <MyComponent client:only />' ); }); test.run(); ================================================ FILE: packages/compiler/test/errors/define-vars.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('define:vars warning', async () => { const result = await transform( `<Fragment><slot /></Fragment> <style define:vars={{ color: 'red' }}></style>`, { filename: '/src/components/Foo.astro' } ); assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].code, 2007); }); test('define:vars no warning', async () => { const result = await transform( `<div><slot /></div> <style define:vars={{ color: 'red' }}></style>`, { filename: 
'/src/components/Foo.astro' } ); assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 0); }); test.run(); ================================================ FILE: packages/compiler/test/errors/fragment-shorthand.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `<html> <head> <title>Hello world</title> </head> <body> < data-test="hello"><div></div></> </body> </html>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/components/fragment.astro', }); }); test('got a tokenizer error', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is( result.diagnostics[0].text, 'Unable to assign attributes when using <> Fragment shorthand syntax!' ); const loc = result.diagnostics[0].location; assert.is(FIXTURE.split('\n')[loc.line - 1], ` < data-test="hello"><div></div></>`); assert.is( FIXTURE.split('\n')[loc.line - 1].slice(loc.column - 1, loc.column - 1 + loc.length), `< data-test="hello">` ); }); test.run(); ================================================ FILE: packages/compiler/test/errors/html-comment.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `<html> <head> <title>Hello world</title> </head> <body> <div> <!-- </div> </body> </html>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/components/EOF.astro', }); }); test('html comment error', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].text, 'Unterminated comment'); assert.is(FIXTURE.split('\n')[result.diagnostics[0].location.line - 1], ' 
<!--'); }); test.run(); ================================================ FILE: packages/compiler/test/errors/invalid-spread.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('...spread has warning', async () => { const result = await transform('<Head ...seo />', { filename: '/src/components/Foo.astro' }); assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].code, 2008); }); test('{...spread} does not have warning', async () => { const result = await transform('<Head {...seo} />', { filename: '/src/components/Foo.astro' }); assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 0); }); test.run(); ================================================ FILE: packages/compiler/test/errors/jsx-comment.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `<html> <head> <title>Hello world</title> </head> <body> <div> {/* </div> </body> </html>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/components/EOF.astro', }); }); test('jsx comment error', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].text, 'Unterminated comment'); assert.is(FIXTURE.split('\n')[result.diagnostics[0].location.line - 1], ' {/*'); }); test.run(); ================================================ FILE: packages/compiler/test/errors/missing-frontmatter-fence.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; // Missing opening frontmatter fence - only has closing --- const 
FIXTURE = `import BaseLayout from '@/layouts/BaseLayout.astro'; import { getCollection } from 'astro:content'; const posts = await getCollection('blog'); --- <BaseLayout title="Crash Test"> <h1>{posts.length}</h1> </BaseLayout>`; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: '/src/pages/checkthis.astro', }); }); test('missing opening frontmatter fence reports error instead of panic', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].code, 1006); assert.is( result.diagnostics[0].text, 'The closing frontmatter fence (---) is missing an opening fence' ); assert.is( result.diagnostics[0].hint, 'Add --- at the beginning of your file before any import statements or code' ); // Verify the error location points to the closing --- fence const loc = result.diagnostics[0].location; // The line number should point to the line containing --- assert.is(FIXTURE.split('\n')[loc.line - 1], '---'); // The column and length should extract exactly the --- characters assert.is( FIXTURE.split('\n')[loc.line - 1].slice(loc.column - 1, loc.column - 1 + loc.length), '---' ); }); test.run(); ================================================ FILE: packages/compiler/test/head-metadata/with-head.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <html> <head> <title>Testing</title> </head> <body> <h1>Testing</h1> </body> </html> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: 'test.astro', }); }); test('containsHead is true', () => { assert.equal(result.containsHead, true); }); test.run(); ================================================ FILE: packages/compiler/test/head-metadata/without-head.ts ================================================ import { type 
TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <slot /> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { filename: 'test.astro', }); }); test('containsHead is false', () => { assert.equal(result.containsHead, false); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/complex-frontmatter.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; const input = `--- // TODO: Due to this issue: https://github.com/withastro/astro/issues/1438, this route can't be in the same folder // as the paginated article list is or they'll conflict, so this means our articles URL are \`/article\/\${slug}\` instead // of \`/articles/\${slug}\` (with a s), once that issue is fixed, we'll be able to put it back in the right place const foobar = true; import { Article, postProcessArticle } from "$data/articles"; import type { GetStaticPaths, MDXInstance } from "$data/shared"; import ArticleLayout from "$layouts/ArticleLayout.astro"; import { getSlugFromFile } from "$utils"; export const getStaticPaths: GetStaticPaths = async () => { const articles = await Astro.glob<Article>("/content/articles/**/*.mdx"); return articles.map((article) => { const augmentedFrontmatter = postProcessArticle(article.frontmatter, article.file); return { params: { slug: getSlugFromFile(article.file) }, props: { article: { ...article, frontmatter: augmentedFrontmatter } }, }; }); }; interface Props { article: MDXInstance<Article>; } const { article } = Astro.props; --- <ArticleLayout article={article} />`; test('tracks getStaticPaths', async () => { const loc = await testJSSourcemap(input, 'getStaticPaths'); assert.equal(loc, { source: 'index.astro', line: 13, column: 14, name: null }); }); test('tracks foobar', 
async () => { const loc = await testJSSourcemap(input, 'foobar'); assert.equal(loc, { source: 'index.astro', line: 6, column: 7, name: null }); }); ================================================ FILE: packages/compiler/test/js-sourcemaps/deprecated.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('script is:inline', async () => { const input = `--- /** @deprecated */ const deprecated = "Astro" deprecated; const hello = "Astro" --- `; const output = await testJSSourcemap(input, 'deprecated;'); assert.equal(output, { line: 4, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/error.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('svelte error', async () => { const input = `--- import SvelteOptionalProps from "./SvelteOptionalProps.svelte" --- <SvelteOptionalProps></SvelteOptionalProps>`; const output = await testJSSourcemap(input, '<SvelteOptionalProps>'); assert.equal(output, { line: 5, column: 1, source: 'index.astro', name: null, }); }); test('vue error', async () => { const input = `--- import SvelteError from "./SvelteError.svelte" import VueError from "./VueError.vue" --- <SvelteError></SvelteError> <VueError></VueError>`; const svelte = await testJSSourcemap(input, '<SvelteError>'); assert.equal(svelte, { line: 6, column: 1, source: 'index.astro', name: null, }); const vue = await testJSSourcemap(input, '<VueError>'); assert.equal(vue, { line: 7, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/frontmatter.ts ================================================ import { test } from 'uvu'; import * as assert from 
'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('frontmatter', async () => { const input = `--- nonexistent --- `; const output = await testJSSourcemap(input, 'nonexistent'); assert.equal(output, { line: 2, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/hover.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; const fixture = `--- const MyVariable = "Astro" /** Documentation */ const MyDocumentedVariable = "Astro" /** @author Astro */ const MyJSDocVariable = "Astro" --- `; test('hover I', async () => { const input = fixture; const output = await testJSSourcemap(input, 'MyVariable'); assert.equal(output, { line: 2, column: 11, source: 'index.astro', name: null, }); }); test('hover II', async () => { const input = fixture; const output = await testJSSourcemap(input, 'MyDocumentedVariable'); assert.equal(output, { line: 5, column: 11, source: 'index.astro', name: null, }); }); test('hover III', async () => { const input = fixture; const output = await testJSSourcemap(input, 'MyJSDocVariable'); assert.equal(output, { line: 8, column: 11, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/module.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('script is:inline', async () => { const input = `--- // valid import { foo } from './script.js'; import ComponentAstro from './astro.astro'; import ComponentSvelte from './svelte.svelte'; import ComponentVue from './vue.vue'; // invalid import { baz } from './script'; foo;baz;ComponentAstro;ComponentSvelte;ComponentVue; --- `; const output = await testJSSourcemap(input, 
`'./script'`); assert.equal(output, { line: 8, column: 23, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/script.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('script is:inline', async () => { const input = `<script is:inline> const MyNumber = 3; console.log(MyNumber.toStrang()); </script> `; const output = await testJSSourcemap(input, '\n'); assert.equal(output, { line: 1, column: 18, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/template.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testJSSourcemap } from '../utils.js'; test('template expression basic', async () => { const input = '<div>{nonexistent}</div>'; const output = await testJSSourcemap(input, 'nonexistent'); assert.equal(output, { source: 'index.astro', line: 1, column: 6, name: null, }); }); test('template expression has dot', async () => { const input = '<div>{console.log(hey)}</div>'; const output = await testJSSourcemap(input, 'log'); assert.equal(output, { source: 'index.astro', line: 1, column: 14, name: null, }); }); test('template expression with addition', async () => { const input = `{"hello" + hey}`; const output = await testJSSourcemap(input, 'hey'); assert.equal(output, { source: 'index.astro', line: 1, column: 11, name: null, }); }); test('html attribute', async () => { const input = `<svg color="#000"></svg>`; const output = await testJSSourcemap(input, 'color'); assert.equal(output, { source: 'index.astro', name: null, line: 1, column: 5, }); }); test('complex template expression', async () => { const input = `{[].map(ITEM => { v = "what"; return <div>{ITEMS}</div> })}`; const item = await 
testJSSourcemap(input, 'ITEM'); const items = await testJSSourcemap(input, 'ITEMS'); assert.equal(item, { source: 'index.astro', name: null, line: 1, column: 8, }); assert.equal(items, { source: 'index.astro', name: null, line: 3, column: 14, }); }); test('attributes', async () => { const input = `<div className="hello" />`; const className = await testJSSourcemap(input, 'className'); assert.equal(className, { source: 'index.astro', name: null, line: 1, column: 5, }); }); test('special attributes', async () => { const input = `<div @on.click="fn" />`; const onClick = await testJSSourcemap(input, '@on.click'); assert.equal(onClick, { source: 'index.astro', name: null, line: 1, column: 5, }); }); test.run(); ================================================ FILE: packages/compiler/test/js-sourcemaps/windows-linereturns.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('Windows line returns', async () => { const result = await transform( `<div class="px-10">\r\n{\r\n() => {\r\nif (style == Style.Ordered) {\r\nreturn items.map((item: string, index: number) => (\r\n// index + 1 is needed to start with 1 not 0\r\n<p>\r\n{index + 1}\r\n<Fragment set:html={item} />\r\n</p>\r\n));\r\n} else {\r\nreturn items.map((item: string) => (\r\n<Fragment set:html={item} />\r\n));\r\n}\r\n}\r\n}\r\n</div>`, { sourcemap: 'both', filename: 'index.astro', resolvePath: (i: string) => i } ); assert.ok(result.code, 'Expected to compile'); }); test.run(); ================================================ FILE: packages/compiler/test/parse/ast.ts ================================================ import { type ParseResult, parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { ElementNode } from '../../types.js'; const FIXTURE = ` --- let value = 'world'; --- <h1 name="value" empty {shorthand} expression={true} 
literal=\`tags\`>Hello {value}</h1> <div></div> `; let result: ParseResult; test.before(async () => { result = await parse(FIXTURE); }); test('ast', () => { assert.type(result, 'object', `Expected "parse" to return an object!`); assert.equal(result.ast.type, 'root', `Expected "ast" root node to be of type "root"`); }); test('frontmatter', () => { const [frontmatter] = result.ast.children; assert.equal( frontmatter.type, 'frontmatter', `Expected first child node to be of type "frontmatter"` ); }); test('element', () => { const [, element] = result.ast.children; assert.equal(element.type, 'element', `Expected first child node to be of type "element"`); }); test('element with no attributes', () => { const [, , , element] = result.ast.children as ElementNode[]; assert.equal(element.attributes, [], `Expected the "attributes" property to be an empty array`); }); test('element with no children', () => { const [, , , element] = result.ast.children as ElementNode[]; assert.equal(element.children, [], `Expected the "children" property to be an empty array`); }); test.run(); ================================================ FILE: packages/compiler/test/parse/client-component-unfound.ts ================================================ import { parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = `{ headers && ( <nav class="mobile-toc"> <TableOfContents client:media="(max-width: 72em)" headers={headers} labels={{ onThisPage: t('rightSidebar.onThisPage'), overview: t('rightSidebar.overview') }} isMobile={true} /> </nav> ) } `; test('unfound client component', async () => { const result = await parse(FIXTURE); assert.ok(result.ast, 'Expected an AST to be generated'); }); test.run(); ================================================ FILE: packages/compiler/test/parse/escaping.ts ================================================ import { parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 
'uvu/assert'; const STYLE = 'div { & span { color: red; }}'; const FIXTURE = `<style>${STYLE}</style>`; test('ampersand', async () => { const result = await parse(FIXTURE); assert.ok(result.ast, 'Expected an AST to be generated'); const [ { children: [{ value: output }], }, ] = result.ast.children as any; assert.equal(output, STYLE, 'Expected AST style to equal input'); }); test.run(); ================================================ FILE: packages/compiler/test/parse/fragment.ts ================================================ import { type ParseResult, parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { FragmentNode } from '../../types.js'; const FIXTURE = '<>Hello</><Fragment>World</Fragment>'; let result: ParseResult; test.before(async () => { result = await parse(FIXTURE); }); test('fragment shorthand', () => { const [first] = result.ast.children as FragmentNode[]; assert.equal(first.type, 'fragment', 'Expected first child to be of type "fragment"'); assert.equal(first.name, '', 'Expected first child to have name of ""'); }); test('fragment literal', () => { const [, second] = result.ast.children as FragmentNode[]; assert.equal(second.type, 'fragment', 'Expected second child to be of type "fragment"'); assert.equal(second.name, 'Fragment', 'Expected second child to have name of ""'); }); test.run(); ================================================ FILE: packages/compiler/test/parse/literal.ts ================================================ import { parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { ElementNode } from '../../types.js'; test('preserve style tag position I', async () => { const input = `<html><body><h1>Hello world!</h1></body></html> <style></style>`; const { ast } = await parse(input); const lastChildren = ast.children.at(-1) as ElementNode; assert.equal(lastChildren.type, 'element', 'Expected last child node to be of 
type "element"'); assert.equal(lastChildren.name, 'style', 'Expected last child node to be of type "style"'); }); test('preserve style tag position II', async () => { const input = `<html></html> <style></style>`; const { ast } = await parse(input); const lastChildren = ast.children.at(-1) as ElementNode; assert.equal(lastChildren.type, 'element', 'Expected last child node to be of type "element"'); assert.equal(lastChildren.name, 'style', 'Expected last child node to be of type "style"'); }); test('preserve style tag position III', async () => { const input = `<html lang="en"><head><BaseHead /></head></html> <style>@use "../styles/global.scss";</style>`; const { ast } = await parse(input); const lastChildren = ast.children.at(-1) as ElementNode; assert.equal(lastChildren.type, 'element', 'Expected last child node to be of type "element"'); assert.equal(lastChildren.name, 'style', 'Expected last child node to be of type "style"'); assert.equal( lastChildren.children[0].type, 'text', 'Expected last child node to be of type "text"' ); }); test('preserve style tag position IV', async () => { const input = `<html lang="en"><head><BaseHead /></head><body><Header /></body></html> <style>@use "../styles/global.scss";</style>`; const { ast } = await parse(input); const lastChildren = ast.children.at(-1) as ElementNode; assert.equal(lastChildren.type, 'element', 'Expected last child node to be of type "element"'); assert.equal(lastChildren.name, 'style', 'Expected last child node to be of type "style"'); assert.equal( lastChildren.children[0].type, 'text', 'Expected last child node to be of type "text"' ); }); test('preserve style tag position V', async () => { const input = `<html lang="en"><head><BaseHead /></head><body><Header /></body><style>@use "../styles/global.scss";</style></html>`; const { ast } = await parse(input); const firstChild = ast.children.at(0) as ElementNode; const lastChild = firstChild.children.at(-1) as ElementNode; assert.equal(lastChild.type, 
'element', 'Expected last child node to be of type "element"'); assert.equal(lastChild.name, 'style', 'Expected last child node to be of type "style"'); assert.equal(lastChild.children[0].type, 'text', 'Expected last child node to be of type "text"'); }); ================================================ FILE: packages/compiler/test/parse/multibyte-characters.ts ================================================ import { parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = '{foo},'; test('does not crash', async () => { const result = await parse(FIXTURE); assert.ok(result.ast, 'does not crash'); }); test('properly maps the position', async () => { const { ast: { children }, } = await parse(FIXTURE); const text = children[1]; assert.equal(text.position?.start.offset, 5, 'properly maps the text start position'); assert.equal(text.position?.end?.offset, 8, 'properly maps the text end position'); }); test.run(); ================================================ FILE: packages/compiler/test/parse/orphan-head.ts ================================================ import { type ParseResult, parse } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Document</title> </head> <body> <h1> Hello world!</h1> </body> </html> `; let result: ParseResult; test.before(async () => { result = await parse(FIXTURE); }); test('orphan head', () => { assert.ok(result, 'able to parse'); const [doctype, html, ...others] = result.ast.children; assert.equal(others.length, 1, 'Expected only three child nodes!'); assert.equal(doctype.type, 'doctype', `Expected first child node to be of type "doctype"`); assert.equal(html.type, 'element', `Expected first child node to be of type "element"`); 
});

test.run();

================================================ FILE: packages/compiler/test/parse/orphan-slot.ts ================================================

import { type TransformResult, transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- import { Code, Markdown } from 'astro/components'; const {isRequired, description, example} = Astro.props; --- <slot /> {isRequired && <p class="mt-16 badge badge-info">Required</p>} {description?.trim() && <Markdown content={description} />} {example && <Code code={example} lang='yaml' />} `;

let result: TransformResult;

test.before(async () => {
	result = await transform(FIXTURE);
});

// A <slot /> outside of any enclosing element must still compile.
test('orphan slot', () => {
	assert.ok(result.code, 'able to parse');
});

test.run();

================================================ FILE: packages/compiler/test/parse/position.ts ================================================

import { parse } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';
import type { ElementNode, FrontmatterNode } from '../../types.js';

// NOTE(review): the template-literal fixtures below were whitespace-collapsed
// by the repo-dump extraction; the line/column assertions (e.g. line 3) imply
// the originals contain real newlines — confirm against the repository before
// editing any fixture.
test('include start and end positions', async () => {
	const input = `--- // Hello world! --- <iframe>Hello</iframe><div></div>`;
	const { ast } = await parse(input);
	const iframe = ast.children[1] as ElementNode;
	assert.is(iframe.name, 'iframe');
	assert.ok(iframe.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(iframe.position.end, 'Expected serialized output to contain an end position');
});

test('include start and end positions for comments', async () => {
	const input = `--- // Hello world! --- <!-- prettier:ignore --> <iframe>Hello</iframe><div></div>`;
	const { ast } = await parse(input);
	const comment = ast.children[1] as ElementNode;
	assert.is(comment.type, 'comment');
	assert.ok(comment.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(comment.position.end, 'Expected serialized output to contain an end position');
});

test('include start and end positions for text', async () => {
	const input = `--- // Hello world! --- Hello world!`;
	const { ast } = await parse(input);
	const text = ast.children[1] as ElementNode;
	assert.is(text.type, 'text');
	assert.ok(text.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(text.position?.end, 'Expected serialized output to contain an end position');
});

test('include start and end positions for self-closing tags', async () => {
	const input = '<input/>';
	const { ast } = await parse(input);
	const element = ast.children[0] as ElementNode;
	assert.is(element.type, 'element');
	assert.is(element.name, 'input');
	assert.ok(element.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(element.position.end, 'Expected serialized output to contain an end position');
});

test('include correct start and end position for self-closing tag', async () => {
	const input = ` <!-- prettier-ignore --> <li />`;
	const { ast } = await parse(input);
	const li = ast.children[1] as ElementNode;
	assert.is(li.name, 'li');
	assert.ok(li.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(li.position.end, 'Expected serialized output to contain an end position');
	assert.equal(
		li.position.start,
		{ line: 3, column: 1, offset: 26 },
		'Expected serialized output to contain a start position'
	);
	assert.equal(
		li.position.end,
		{ line: 3, column: 6, offset: 31 },
		'Expected serialized output to contain an end position'
	);
});

test('include correct start and end position for normal closing tag', async () => {
	const input = ` <!-- prettier-ignore --> <li></li>`;
	const { ast } = await parse(input);
	const li = ast.children[1] as ElementNode;
	assert.is(li.name, 'li');
	assert.ok(li.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(li.position.end, 'Expected serialized output to contain an end position');
	assert.equal(
		li.position.start,
		{ line: 3, column: 1, offset: 26 },
		'Expected serialized output to contain a start position'
	);
	assert.equal(
		li.position.end,
		{ line: 3, column: 10, offset: 35 },
		'Expected serialized output to contain an end position'
	);
});

// Regression test for withastro/compiler#802.
test('include start and end position if frontmatter is only thing in file (#802)', async () => {
	const input = `--- ---`;
	const { ast } = await parse(input);
	const frontmatter = ast.children[0] as FrontmatterNode;
	assert.is(frontmatter.type, 'frontmatter');
	assert.ok(frontmatter.position?.start, 'Expected serialized output to contain a start position');
	assert.ok(frontmatter.position.end, 'Expected serialized output to contain an end position');
	assert.equal(
		frontmatter.position.start,
		{ line: 1, column: 1, offset: 0 },
		'Expected serialized output to contain a start position'
	);
	assert.equal(
		frontmatter.position.end,
		{ line: 2, column: 4, offset: 7 },
		'Expected serialized output to contain an end position'
	);
});

test.run();

================================================ FILE: packages/compiler/test/parse/serialize.ts ================================================

import { parse } from '@astrojs/compiler';
import { serialize } from '@astrojs/compiler/utils';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = `--- let value = 'world'; let content = "Testing 123"; --- <style> :root { color: red; } </style> <div>Hello {value}</div> <h1 name="value" set:html={content} empty {shorthand} expression={true} literal=\`tags\` {...spread}>Hello {value}</h1> <Fragment set:html={content} /> <Markdown is:raw> # Hello world!
</Markdown> `;

let result: string;

test.before(async () => {
	const { ast } = await parse(FIXTURE);
	try {
		result = serialize(ast);
	} catch (e) {
		// eslint-disable-next-line no-console
		console.log(e);
	}
});

// Round-trip: parse + serialize must reproduce the source exactly.
test('serialize', () => {
	assert.type(result, 'string', `Expected "serialize" to return an object!`);
	assert.equal(result, FIXTURE, 'Expected serialized output to equal input');
});

test('self-close elements', async () => {
	const input = '<div />';
	const { ast } = await parse(input);
	const output = serialize(ast, { selfClose: false });
	const selfClosedOutput = serialize(ast);
	assert.equal(output, '<div></div>', 'Expected serialized output to equal <div></div>');
	assert.equal(selfClosedOutput, input, `Expected serialized output to equal ${input}`);
});

// Double-, single-, and un-quoted attributes keep their original quoting.
test('raw attributes', async () => {
	const input = `<div name="value" single='quote' un=quote />`;
	const { ast } = await parse(input);
	const output = serialize(ast);
	assert.equal(output, input, `Expected serialized output to equal ${input}`);
});

test.run();

================================================ FILE: packages/compiler/test/resolve-path/preserve.ts ================================================

import { type TransformResult, transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

// NOTE(review): the `import { name } './foo.module.css'` line below is missing
// `from` — presumably a deliberate malformed-input fixture (or a typo). Confirm
// against the repository before changing it.
const FIXTURE = ` --- import Foo from './Foo.jsx' import Bar from './Bar.jsx' import { name } './foo.module.css' --- <Foo /> <Foo client:load /> <Foo client:only="react" /> `;

let result: TransformResult;

test.before(async () => {
	result = await transform(FIXTURE, {
		resolvePath: async (s) => s,
	});
});

// The resolvePath result must be emitted as the client:component-path.
test('preserve path', () => {
	assert.match(result.code, /"client:load":true.*"client:component-path":\("\.\/Foo\.jsx"\)/);
	assert.match(result.code, /"client:only":"react".*"client:component-path":\("\.\/Foo\.jsx"\)/);
});

// The legacy $$metadata machinery must no longer appear in compiled output.
test('no metadata', () => {
	assert.not.match(result.code, /\$\$metadata/);
	assert.not.match(result.code, /\$\$createMetadata/);
	assert.not.match(result.code, /createMetadata as \$\$createMetadata/);
	assert.not.match(result.code, /import \* as \$\$module\d/);
});

test.run();

================================================ FILE: packages/compiler/test/scope/same-source.ts ================================================

import { transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- --- <style> div { background-color: blue; width: 50px; height: 50px; } </style> <div /> `.trim();

// Pulls the first generated astro-XXXX scope class out of compiled output.
function grabAstroScope(code: string) {
	const match = /astro-[0-9A-Za-z]+/.exec(code);
	if (match) {
		return match[0];
	}
	return null;
}

// The scope hash must incorporate the filename, not just the file contents.
test('Similar components have different scoped class names', async () => {
	let result = await transform(FIXTURE, {
		normalizedFilename: '/src/pages/index.astro',
	});
	const scopeA = grabAstroScope(result.code);
	assert.ok(scopeA);
	result = await transform(FIXTURE, {
		normalizedFilename: '/src/pages/two.astro',
	});
	const scopeB = grabAstroScope(result.code);
	assert.ok(scopeB);
	assert.ok(scopeA !== scopeB, 'The scopes should not match for different files');
});

test.run();

================================================ FILE: packages/compiler/test/scripts/isinline-hint.ts ================================================

import { transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

// Severity 4 is a hint (see the compiler's diagnostics severities).
test('reports a hint for adding attributes to a script tag without is:inline', async () => {
	const result = await transform(`<script type="module"></script>`);
	assert.equal(result.diagnostics[0].severity, 4);
	assert.match(result.diagnostics[0].text, /\#script-processing/);
});

test('does not report a diagnostic for the src attribute', async () => {
	const result = await transform(`<script src="/external.js"></script>`);
	// NOTE(review): leftover debug logging — consider removing.
	console.log(result.diagnostics);
	assert.equal(result.diagnostics.length, 0);
});

test.run();

================================================ FILE: packages/compiler/test/scripts/order.ts ================================================

import { transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

// Hoisted scripts must come out in document order.
test('outputs scripts in expected order', async () => {
	const result = await transform(
		` <script>console.log(1)</script> <script>console.log(2)</script>`
	);
	const scripts = result.scripts;
	// for typescript
	if (scripts[0].type === 'external') throw new Error('Script is external');
	if (scripts[1].type === 'external') throw new Error('Script is external');
	assert.match(scripts[0].code, 'console.log(1)');
	assert.match(scripts[1].code, 'console.log(2)');
});

test.run();

================================================ FILE: packages/compiler/test/server-islands/meta.ts ================================================

import { fileURLToPath } from 'node:url';
import { transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- import Avatar from './Avatar.astro'; import {Other} from './Other.astro'; --- <Avatar server:defer /> <Other server:defer /> `;

let result: Awaited<ReturnType<typeof transform>>;

test.before(async () => {
	result = await transform(FIXTURE, {
		resolvePath: async (s: string) => {
			const out = new URL(s, import.meta.url);
			return fileURLToPath(out);
		},
	});
});

test('component metadata added', () => {
	assert.equal(result.serverComponents.length, 2);
});

test('component should contain head propagation', () => {
	assert.equal(result.propagation, true);
});

test('path resolved to the filename', () => {
	const m = result.serverComponents[0];
	assert.ok(m.specifier !== m.resolvedPath);
});

test('localName is the name used in the template', () => {
	assert.equal(result.serverComponents[0].localName, 'Avatar');
	assert.equal(result.serverComponents[1].localName, 'Other');
});

// Default import vs named import are distinguished in exportName.
test('exportName is the export name of the imported module', () => {
	assert.equal(result.serverComponents[0].exportName, 'default');
	assert.equal(result.serverComponents[1].exportName, 'Other');
});

test.run();

================================================ FILE: packages/compiler/test/slot-result/result.ts ================================================

import { type TransformResult, transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- import Parent from './Parent.astro'; --- <Parent> <div></div> </Parent> `;

let result: TransformResult;

test.before(async () => {
	result = await transform(FIXTURE, {
		resolvePath: async (s) => s,
		resultScopedSlot: true,
	});
});

// With resultScopedSlot enabled, slot callbacks receive $$result.
test('resultScopedSlot: includes the result object in the call to the slot', () => {
	assert.match(result.code, /\(\$\$result\) =>/);
});

test.run();

================================================ FILE: packages/compiler/test/static-extraction/css.ts ================================================

import { type TransformResult, transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- --- <style> .thing { color: green; } .url-space { background: url('/white space.png'); } .escape:not(#\\#) { color: red; } </style> `;

let result: TransformResult;

test.before(async () => {
	result = await transform(FIXTURE);
});

test('extracts styles', () => {
	assert.equal(
		result.css.length,
		1,
		`Incorrect CSS returned. Expected a length of 1 and got ${result.css.length}`
	);
});

// Spaces inside url() must be escaped in the extracted (minified) CSS.
test('escape url with space', () => {
	assert.match(result.css[0], 'background:url(/white\\ space.png)');
});

test('escape css syntax', () => {
	assert.match(result.css[0], ':not(#\\#)');
});

test.run();

================================================ FILE: packages/compiler/test/static-extraction/hoist-expression.ts ================================================

import { type TransformResult, transform } from '@astrojs/compiler';
import { test } from 'uvu';
import * as assert from 'uvu/assert';

const FIXTURE = ` --- const url = 'foo'; --- <script type="module" hoist src={url}></script> `;

let result: TransformResult;

test.before(async () => {
	result = await transform(FIXTURE);
});

// Only asserts compilation succeeds; the warning itself is not inspected here.
test('logs warning with hoisted expression', () => {
	assert.ok(result.code);
});

test.run();

================================================ FILE: packages/compiler/test/stress/index.ts ================================================

import { transform } from '@astrojs/compiler';

// Compiles one large real-world component (an Alpine.js cart drawer) with sourcemaps.
// NOTE(review): the template literal below was whitespace-collapsed by the
// repo-dump extraction; do not reformat it, as its bytes are the test input.
async function run() {
	await transform(
		`--- import CartItems from './CartItems.astro'; --- <script> document.addEventListener('alpine:init', () => { Alpine.data('initCartDrawer', () => ({ open: false, cart: {}, getData(data) { if (data.cart) { this.cart = data.cart this.setCartItems(); } }, cartItems: [], setCartItems() { this.cartItems = this.cart && this.cart.items.sort(function(a,b) { return a.item_id - b.item_id }) || [] }, deleteItemFromCart(itemId) { var formKey = document.querySelector('input[name=form_key]').value; fetch(BASE_URL+"checkout/sidebar/removeItem/", { "headers": { "content-type": "application/x-www-form-urlencoded; charset=UTF-8", }, "body": "form_key="+ formKey + "&item_id="+itemId, "method": "POST", "mode": "cors", "credentials": "include" }).then(function (response) { if (response.redirected) { window.location.href = response.url; } else if (response.ok) { return response.json(); } else { typeof window.dispatchMessages !== "undefined" && window.dispatchMessages( [{ type: "warning", text: "Could not remove item from quote." }], 5000 ); } }).then(function (response) { typeof window.dispatchMessages !== "undefined" && window.dispatchMessages( [{ type: response.success ? "success" : "error", text: response.success ? "You removed the item." : response.error_message }], 5000 ); var reloadCustomerDataEvent = new CustomEvent("reload-customer-section-data"); window.dispatchEvent(reloadCustomerDataEvent); }); } })) }) </script> <section id="cart-drawer" x-data="initCartDrawer" @private-content-loaded.window="getData(event.detail.data)" @toggle-cart.window="open=true;" @keydown.window.escape="open=false" > <template x-if="cart && cart.summary_count"> <div role="dialog" aria-labelledby="cart-drawer-title" aria-modal="true" @click.outside="open = false" class="fixed inset-y-0 right-0 z-30 flex max-w-full"> <div class="backdrop" x-show="open" x-transition:enter="ease-in-out duration-500" x-transition:enter-start="opacity-0" x-transition:enter-end="opacity-100" x-transition:leave="ease-in-out duration-500" x-transition:leave-start="opacity-100" x-transition:leave-end="opacity-0" @click="open = false" aria-label="Close panel"> </div> <div class="relative w-screen max-w-md shadow-2xl" x-show="open" x-transition:enter="transform transition ease-in-out duration-500 sm:duration-700" x-transition:enter-start="translate-x-full" x-transition:enter-end="translate-x-0" x-transition:leave="transform transition ease-in-out duration-500 sm:duration-700" x-transition:leave-start="translate-x-0" x-transition:leave-end="translate-x-full" > <div x-show="open" x-transition:enter="ease-in-out duration-500" x-transition:enter-start="opacity-0" x-transition:enter-end="opacity-100" x-transition:leave="ease-in-out duration-500" x-transition:leave-start="opacity-100" x-transition:leave-end="opacity-0" class="absolute top-0 right-0 flex p-2 mt-2" > <button @click="open = false;" aria-label="Close panel" class="p-2 text-gray-300 transition duration-150 ease-in-out hover:text-black"> <svg class="w-6 h-6" fill="none" viewBox="0 0 24 24" stroke="currentColor"> <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"> </path> </svg> </button> </div> <div class="flex flex-col h-full py-6 space-y-6 bg-white shadow-xl"> <header class="px-4 sm:px-6"> <h2 id="cart-drawer-title" class="text-lg font-medium leading-7 text-gray-900">My Cart</h2> </header> <div class="relative grid gap-6 px-4 py-6 overflow-y-auto bg-white border-b sm:gap-8 sm:px-6 border-container"> <template x-for="item in cartItems"> <!-- <CartItems/> --> <div class="flex items-start p-3 -m-3 space-x-4 transition duration-150 ease-in-out rounded-lg hover:bg-gray-100"> <a :href="item.product_url" class="w-1/4"> <img :src="item.product_image.src" :width="item.product_image.width" :height="item.product_image.height" loading="lazy" /> </a> <div class="w-3/4 space-y-2"> <div> <p class="text-xl"> <span x-html="item.qty"></span> x <span x-html="item.product_name"></span> </p> <p class="text-sm"><span x-html="item.product_sku"/></p> </div> <template x-for="option in item.options"> <div class="pt-2"> <p class="font-semibold" x-text="option.label + ':'"></p> <p class="text-secondary" x-html="option.value"></p> </div> </template> <p><span x-html="item.product_price"></span></p> <div class="pt-4"> <a :href="item.configure_url" x-show="item.product_type !== 'grouped'" class="inline-flex p-2 mr-2 btn btn-primary"> <svg fill="none" viewBox="0 0 24 24" stroke="currentColor" size="16" class="w-5 h-5"> <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15.232 5.232l3.536 3.536m-2.036-5.036a2.5 2.5 0 113.536 3.536L6.5 21.036H3v-3.572L16.732 3.732z"> </path> </svg> </a> </div> </div> </div> </template> </div> <div class="relative grid gap-6 px-4 py-6 bg-white sm:gap-8 sm:px-6"> <div class="w-full p-3 -m-3 space-x-4 transition duration-150 ease-in-out rounded-lg hover:bg-gray-100"> <p>Subtotal: <span x-html="cart.subtotal"></span></p> </div> <div class="w-full p-3 -m-3 space-x-4 transition duration-150 ease-in-out rounded-lg hover:bg-gray-100"> <a @click.prevent.stop="$dispatch('toggle-authentication', {url: 'checkout'});" href="checkout" class="inline-flex btn btn-primary"> Checkout </a> <span>or</span> <a href="checkout/cart" class="underline"> View and Edit Cart </a> </div> </div> </div> </div> </template> </section>`,
		{ sourcemap: true, }
	);
}

const MAX_CONCURRENT_RENDERS = 25;
const MAX_RENDERS = 1e4;

// Stress-compiles the component 10,000 times with bounded concurrency.
async function test() {
	await run();
	const promises = [];
	const tests = [];
	for (let i = 0; i < MAX_RENDERS; i++) {
		tests.push(() => {
			if (i % 1000 === 0) {
				console.log(`Test ${i}`);
			}
			return run();
		});
	}
	// Throttle the paths to avoid overloading the CPU with too many tasks.
	for (const ts of throttle(MAX_CONCURRENT_RENDERS, tests)) {
		for (const t of ts) {
			promises.push(t());
		}
		// This blocks generating more paths until these 10 complete.
		await Promise.all(promises);
		// This empties the array without allocating a new one.
		promises.length = 0;
	}
}

// Throttle the rendering a paths to prevents creating too many Promises on the microtask queue.
function* throttle(max: number, tests: any) {
	const tmp = [];
	let i = 0;
	for (const t of tests) {
		tmp.push(t);
		if (i === max) {
			yield tmp;
			// Empties the array, to avoid allocating a new one.
			tmp.length = 0;
			i = 0;
		} else {
			i++;
		}
	}
	// If tmp has items in it, that means there were less than {max} paths remaining
	// at the end, so we need to yield these too.
if (tmp.length) { yield tmp; } } test(); ================================================ FILE: packages/compiler/test/styles/define-vars.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { preprocessStyle } from '../utils.js'; test('does not include define:vars in generated markup', async () => { const input = ` --- let color = 'red'; --- <style lang="scss" define:vars={{ color }}> div { color: var(--color); } </style> <div>Hello world!</div> <div>Ahhh</div> `; const result = await transform(input, { preprocessStyle, }); assert.ok(!result.code.includes('STYLES')); assert.equal(result.css.length, 1); }); test('handles style object and define:vars', async () => { const input = ` --- let color = 'red'; --- <div style={{ color: 'var(--color)' }}>Hello world!</div> <style define:vars={{ color }}></style> `; const result = await transform(input); assert.match(result.code, `$$addAttribute([{ color: 'var(--color)' },$$definedVars], "style")`); }); test.run(); ================================================ FILE: packages/compiler/test/styles/emit-scope.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` --- let value = 'world'; --- <style>div { color: red; }</style> <div>Hello world!</div> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { sourcemap: true, }); }); test('emits a scope', () => { assert.ok(result.scope, 'Expected to return a scope'); }); test.run(); ================================================ FILE: packages/compiler/test/styles/empty-style.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { 
preprocessStyle } from '../utils.js'; const FIXTURE = ` --- let value = 'world'; --- <style lang="scss"></style> <div>Hello world!</div> <div>Ahhh</div> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { sourcemap: true, preprocessStyle, }); }); test('can compile empty style', () => { assert.ok(result.code, 'Expected to compile with empty style.'); }); test.run(); ================================================ FILE: packages/compiler/test/styles/hash.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE_A = ` <style> h1 { color: red; } </style> <h1>Hello world!</h1> `; const FIXTURE_B = ` <style> h1 { color: blue; } </style> <h1>Hello world!</h1> `; const FIXTURE_C = ` <style> h1 { color: red; } </style> <script>console.log("Hello world")</script> `; const FIXTURE_D = ` <style> h1 { color: red; } </style> <script>console.log("Hello world!")</script> `; const scopes: string[] = []; test.before(async () => { const [{ scope: a }, { scope: b }, { scope: c }, { scope: d }] = await Promise.all( [FIXTURE_A, FIXTURE_B, FIXTURE_C, FIXTURE_D].map((source) => transform(source)) ); scopes.push(a, b, c, d); }); test('hash changes when content outside of style change', () => { const [, b, c] = scopes; assert.not.equal(b, c, 'Expected scopes to not be equal'); }); test('hash changes when scripts change', () => { const [, , c, d] = scopes; assert.not.equal(c, d, 'Expected scopes to not be equal'); }); test.run(); ================================================ FILE: packages/compiler/test/styles/sass.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { preprocessStyle } from '../utils.js'; const FIXTURE = ` --- let value = 'world'; --- <style lang="scss" define:vars={{ a: 
0 }}> $color: red; div { color: $color; } </style> <div>Hello world!</div> <div>Ahhh</div> <style lang="scss"> $color: green; div { color: $color; } </style> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { sourcemap: true, preprocessStyle, }); }); test('transforms scss one', () => { assert.match(result.css[0], 'color:red', 'Expected "color:red" to be present.'); }); test('transforms scss two', () => { assert.match( result.css[result.css.length - 1], 'color:green', 'Expected "color:green" to be present.' ); }); test.run(); ================================================ FILE: packages/compiler/test/table/components.ts ================================================ import { transform } from '@astrojs/compiler'; import { parse } from 'acorn'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('allows components in table', async () => { const input = ` --- const MyTableRow = "tr"; --- <table> <MyTableRow> <td>Witch</td> </MyTableRow> <MyTableRow> <td>Moon</td> </MyTableRow> </table> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); ================================================ FILE: packages/compiler/test/table/expressions.ts ================================================ import { transform } from '@astrojs/compiler'; import { parse } from 'acorn'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('allows expressions in table', async () => { const input = ` --- --- <html lang="en"> <head> <meta charset="utf-8" /> <link rel="icon" type="image/svg+xml" href="/favicon.svg" /> <meta name="viewport" content="width=device-width" /> <meta name="generator" content={Astro.generator} /> <title>Astro</title> </head> <body> <table> <tbody> {[1, 2, 3].map((num) => ( 
<tr>{num}</tr> ))} </tbody> </table> </body> </html> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); assert.match(code, '<tr>${num}</tr>'); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); test('allows many expressions in table', async () => { const input = ` --- --- <html lang="en"> <head> <meta charset="utf-8" /> <link rel="icon" type="image/svg+xml" href="/favicon.svg" /> <meta name="viewport" content="width=device-width" /> <meta name="generator" content={Astro.generator} /> <title>Astro</title> </head> <body> <table> <tbody> {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} </tbody> </table> </body> </html> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); assert.match(code, '<tr>${num}</tr>'); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); ================================================ FILE: packages/compiler/test/table/in-expression.ts ================================================ import { transform } from '@astrojs/compiler'; import { parse } from 'acorn'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('does not panic on table in expression', async () => { const input = ` <section> {course.reviews && course.reviews.length && <> <div class="py-3"> <hr> </div> <h2 class="text-lg font-bold">รีวิวจากผู้เรียน (ทั้งหมด {course.reviews.length} รีวิว คะแนนเฉลี่ย {course.reviews.reduce((p, c, _, { length }) => p + c.star / length, 0).toFixed(1)})</h2> <table class="rounded shadow 
dark:shadow-none dark:border dark:border-gray-700"> <tbody> {course.reviews.map(review => ( <tr class="even:bg-gray-50 dark:even:bg-gray-700"> <td class="p-2 align-top"><Icon class="w-8 h-8 flex-shrink-0" name="mdi:account-circle"></Icon></td> <td class="p-2 w-full"> <h3 class="whitespace-nowrap font-bold">{review.name}</h3> {review.comment && <p class="text-sm text-secondary">{review.comment}</p>} </td> <td class="p-2 align-top">{'⭐'.repeat(review.star)}</td> </tr> ))} </tbody> </table> </> } </section> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); test('does not generate invalid markup on table in expression', async () => { const input = ` <ul> {Astro.props.page.data.map(page => <li> <table> <tr><td>{page.frontmatter.title}</td></tr> <tr><td> <Debug {...Object.keys(page)} /> </td></tr> </table> </li> )} </ul> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); test('does not generate invalid markup on multiple tables', async () => { const input = ` <section> {["a", "b", "c"].map(char=> { <table> <tbody> {[1, 2, 3].map((num) => ( <tr>{num}</tr> ))} </tbody> </table> })} </section> <section></section> `; let error = 0; try { const { code } = await transform(input, { filename: 'index.astro', sourcemap: 'inline' }); parse(code, { ecmaVersion: 'latest', sourceType: 'module' }); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should generate valid code'); }); ================================================ FILE: packages/compiler/test/teardown/parse.ts ================================================ import { parse, 
teardown } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = '<div>hello</div>'; test('parse still works after teardown', async () => { const ast1 = await parse(FIXTURE); assert.ok(ast1); teardown(); // Make sure `parse` creates a new WASM instance after teardown removed the previous one const ast2 = await parse(FIXTURE); assert.ok(ast2); }); test.run(); ================================================ FILE: packages/compiler/test/transition/data-astro.ts ================================================ import { transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <div data-astro-reload> <a href="/" data-astro-reload>/</a> <form data-astro-reload="x">.</form> <area data-astro-reload/> <svg xmlns="http://www.w3.org/2000/svg"><a data-astro-reload>.</a></svg> <script is:inline data-astro-rerun src="some.js" type="module" /> <script is:inline data-astro-rerun>"Bar"</script> </div>`; test('Issues warnings for data-astro-* attributes', async () => { const result = await transform(FIXTURE); assert.equal(result.diagnostics.length, 2); assert.equal(result.diagnostics[0].code, 2000); assert.equal(result.diagnostics[1].code, 2010); }); test.run(); ================================================ FILE: packages/compiler/test/transition/meta.ts ================================================ import { type TransformResult, transform } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const FIXTURE = ` <div transition:animate="slide"></div> `; let result: TransformResult; test.before(async () => { result = await transform(FIXTURE, { resolvePath: async (s) => s, }); }); test('tagged with propagation metadata', () => { assert.equal(result.propagation, true); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/basic.ts ================================================ 
import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('basic', async () => { const input = ` --- let value = 'world'; --- <h1 name="value" empty {shorthand} expression={true} literal=\`tags\`>Hello {value}</h1> <div></div> `; const output = `${TSXPrefix} let value = 'world'; <Fragment> <h1 name="value" empty shorthand={shorthand} expression={true} literal={\`tags\`}>Hello {value}</h1> <div></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('named export', async () => { const input = ` --- let value = 'world'; --- <h1 name="value" empty {shorthand} expression={true} literal=\`tags\`>Hello {value}</h1> <div></div> `; const output = `${TSXPrefix} let value = 'world'; <Fragment> <h1 name="value" empty shorthand={shorthand} expression={true} literal={\`tags\`}>Hello {value}</h1> <div></div> </Fragment> export default function Test__AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { filename: '/Users/nmoo/test.astro', sourcemap: 'external', }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('moves @attributes to spread', async () => { const input = `<div @click={() => {}} name="value"></div>`; const output = `${TSXPrefix}<Fragment> <div name="value" {...{"@click":(() => {})}}></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('add trailing semicolon to frontmatter', async () => { const input = ` --- console.log("hello") --- {hello} `; const output = `${TSXPrefix} console.log("hello") {};<Fragment> 
{hello} </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('add trailing semicolon to frontmatter II', async () => { const input = ` --- const { hello } = Astro.props --- <div class={hello}></div> `; const output = `${TSXPrefix} const { hello } = Astro.props {};<Fragment> <div class={hello}></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('moves attributes with dots in them to spread', async () => { const input = `<div x-on:keyup.shift.enter="alert('Astro')" name="value"></div>`; const output = `${TSXPrefix}<Fragment> <div name="value" {...{"x-on:keyup.shift.enter":"alert('Astro')"}}></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('moves attributes that starts with : to spread', async () => { const input = `<div :class="hey" name="value"></div>`; const output = `${TSXPrefix}<Fragment> <div name="value" {...{":class":"hey"}}></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test("Don't move attributes to spread unnecessarily", async () => { const input = `<div 丽dfds_fsfdsfs aria-blarg name="value"></div>`; const output = `${TSXPrefix}<Fragment> <div 丽dfds_fsfdsfs aria-blarg name="value"></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code 
} = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves unclosed tags', async () => { const input = '<components.'; const output = `${TSXPrefix}<Fragment> <components. </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('template literal attribute', async () => { const input = '<div class=`${hello}`></div>'; const output = `${TSXPrefix}<Fragment> <div class={\`\${hello}\`}></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('unclosed tags', async () => { const input = `--- const myMarkdown = await import('../content/post.md'); --- <myMarkdown.`; const output = `${TSXPrefix} const myMarkdown = await import('../content/post.md'); <Fragment> <myMarkdown. </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('unclosed tags II', async () => { const input = `--- const myMarkdown = await import('../content/post.md'); --- <myMarkdown. `; const output = `${TSXPrefix} const myMarkdown = await import('../content/post.md'); <Fragment> <myMarkdown. 
</Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('spread object', async () => { const input = `<DocSearch {...{ lang, labels: { modal, placeholder } }} client:only="preact" />`; const output = `${TSXPrefix}<Fragment> <DocSearch {...{ lang, labels: { modal, placeholder } }} client:only="preact" /> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('spread object II', async () => { const input = `<MainLayout {...Astro.props}> </MainLayout>`; const output = `${TSXPrefix}<Fragment> <MainLayout {...Astro.props}> </MainLayout> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('fragment with no name', async () => { const input = '<>+0123456789</>'; const output = `${TSXPrefix}<Fragment> <>+0123456789</> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves spaces in tag', async () => { const input = '<Button ></Button>'; const output = `${TSXPrefix}<Fragment> <Button ></Button> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves spaces after attributes in tag', async () => { const input = '<Button a="b" ></Button>'; 
const output = `${TSXPrefix}<Fragment> <Button a="b" ></Button> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves spaces in tag', async () => { const input = '<Button >'; const output = `${TSXPrefix}<Fragment> <Button ></Button> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves line returns in tag by transforming to space', async () => { const input = `<Button >`; const output = `${TSXPrefix}<Fragment> <Button ></Button> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('fragment with leading linebreak', async () => { const input = ` <>Test123</>`; const output = `${TSXPrefix}<Fragment> <>Test123</> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/comment-whitespace.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('preverve whitespace around jsx comments', async () => { const input = `{/* @ts-expect-error */} <Component prop="value"></Component> { // @ts-expect-error } <Component prop="value"></Component> { /* @ts-expect-error */ <Component 
prop="value"></Component> } { // @ts-expect-error <Component prop="value"></Component> }`; const output = `${TSXPrefix}<Fragment> {/* @ts-expect-error */} <Component prop="value"></Component> { // @ts-expect-error } <Component prop="value"></Component> { /* @ts-expect-error */ <Fragment><Component prop="value"></Component></Fragment> } { // @ts-expect-error <Fragment><Component prop="value"></Component></Fragment> } </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/complex-generics.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; const input = `--- // TODO: Due to this issue: https://github.com/withastro/astro/issues/1438, this route can't be in the same folder // as the paginated article list is or they'll conflict, so this means our articles URL are \`/article\/\${slug}\` instead // of \`/articles/\${slug}\` (with a s), once that issue is fixed, we'll be able to put it back in the right place import { Article, postProcessArticle } from "$data/articles"; import type { GetStaticPaths, MDXInstance } from "$data/shared"; import ArticleLayout from "$layouts/ArticleLayout.astro"; import { getSlugFromFile } from "$utils"; export const getStaticPaths: GetStaticPaths = async () => { const articles = await Astro.glob<Article>("/content/articles/**/*.mdx"); return articles.map((article) => { const augmentedFrontmatter = postProcessArticle(article.frontmatter, article.file); return { params: { slug: getSlugFromFile(article.file) }, props: { article: { ...article, frontmatter: augmentedFrontmatter } }, }; }); }; export interface Props { article: MDXInstance<Article>; } const { article 
} = Astro.props; --- <ArticleLayout article={article} />`; test('does not panic on complex generics', async () => { let error = 0; try { await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should not have panicked'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/escape.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('escapes braces in comment', async () => { const input = '<!-- {<div>Not JSX!<div/>}-->'; const output = `${TSXPrefix}<Fragment> {/** \\\\{<div>Not JSX!<div/>\\\\}*/} </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('always inserts space before comment', async () => { const input = '<!--/<div>Error?<div/>-->'; const output = `${TSXPrefix}<Fragment> {/** /<div>Error?<div/>*/} </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('simple escapes star slashes (*/)', async () => { const input = '<!--*/<div>Evil comment<div/>-->'; const output = `${TSXPrefix}<Fragment> {/** *\\/<div>Evil comment<div/>*/} </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('multiple escapes star slashes (*/)', async () => { const input = '<!--***/*/**/*/*/*/<div>Even more evil comment<div/>-->'; const output = 
`${TSXPrefix}<Fragment> {/** ***\\/*\\/**\\/*\\/*\\/*\\/<div>Even more evil comment<div/>*/} </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('does not escape tag opening unnecessarily', async () => { const input = `<div></div> <div`; const output = `${TSXPrefix}<Fragment> <div></div> <div </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('does not escape tag opening unnecessarily II', async () => { const input = `<div> <div </div> `; const output = `${TSXPrefix}<Fragment> <div> <div div {...{"<":true}}> </div></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('does not escape tag opening unnecessarily III', async () => { const input = '<div>{[].map((something) => <div><Blocknote</div><div><Image</div>)}</div>'; const output = `${TSXPrefix}<Fragment> <div>{[].map((something) => <Fragment><div><Blocknote< div><div><Image< div>)</Image<></div></Blocknote<></div></Fragment>}</div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/line-terminator.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('handles 
non-standard line terminators', async () => { const inputs = ['
', 'something
something', 'something

', '


']; let err = 0; for (const input of inputs) { try { await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); } catch (e) { err = 1; } } assert.equal(err, 0, 'did not error'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/literal-style-tag.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('preserve style tag position I', async () => { const input = `<html><body><h1>Hello world!</h1></body></html> <style></style>`; const output = `${TSXPrefix}<Fragment> <html><body><h1>Hello world!</h1></body></html> <style></style> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserve style tag position II', async () => { const input = `<html></html> <style></style>`; const output = `${TSXPrefix}<Fragment> <html></html> <style></style> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserve style tag position III', async () => { const input = `<html lang="en"><head><BaseHead /></head></html> <style>@use "../styles/global.scss";</style>`; const output = `${TSXPrefix}<Fragment> <html lang="en"><head><BaseHead /></head></html> <style>{\`@use "../styles/global.scss";\`}</style> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserve style tag position IV', async () => { const 
input = `<html lang="en"><head><BaseHead /></head><body><Header /></body></html> <style>@use "../styles/global.scss";</style>`; const output = `${TSXPrefix}<Fragment> <html lang="en"><head><BaseHead /></head><body><Header /></body></html> <style>{\`@use "../styles/global.scss";\`}</style> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserve style tag position V', async () => { const input = `<html lang="en"><head><BaseHead /></head><body><Header /></body><style>@use "../styles/global.scss";</style></html>`; const output = `${TSXPrefix}<Fragment> <html lang="en"><head><BaseHead /></head><body><Header /></body><style>{\`@use "../styles/global.scss";\`}</style></html> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/meta.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('return ranges', async () => { const input = `---\nconsole.log("Hello!")\n---\n\n<div></div>`; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal(metaRanges, { frontmatter: { start: 30, end: 54, }, body: { start: 68, end: 80, }, scripts: null, styles: null, }); }); test('return ranges - no frontmatter', async () => { const input = '<div></div>'; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal(metaRanges, { frontmatter: { start: 30, end: 30, }, body: { start: 41, end: 53, }, scripts: null, styles: null, }); }); test('return proper 
ranges with multibyte characters', async () => { const input = '---\n🦄\n---\n\n<div></div>'; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal(metaRanges, { frontmatter: { start: 30, end: 35, }, body: { start: 49, end: 61, }, scripts: null, styles: null, }); }); test('extract scripts', async () => { const input = `<script type="module">console.log({ test: \`literal\` })</script><script type="text/partytown">console.log({ test: \`literal\` })</script><script type="application/ld+json">{"a":"b"}</script><script is:inline>console.log("hello")</script><div onload="console.log('hey')"></div><script>console.log({ test: \`literal\` })</script><script is:raw>something;</script>`; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal( metaRanges.scripts, [ { position: { start: 22, end: 54, }, type: 'module', content: 'console.log({ test: `literal` })', lang: '', }, { position: { start: 93, end: 125, }, type: 'inline', content: 'console.log({ test: `literal` })', lang: '', }, { position: { start: 169, end: 178, }, type: 'json', content: '{"a":"b"}', lang: '', }, { position: { start: 205, end: 225, }, type: 'inline', content: 'console.log("hello")', lang: '', }, { position: { start: 247, end: 266, }, type: 'event-attribute', content: "console.log('hey')", lang: '', }, { position: { start: 281, end: 313, }, type: 'processed-module', content: 'console.log({ test: `literal` })', lang: '', }, { position: { start: 337, end: 347, }, type: 'raw', content: 'something;', lang: '', }, ], 'expected metaRanges.scripts to match snapshot' ); }); test('extract styles', async () => { const input = `<style>body { color: red; }</style><div style="color: blue;"></div><style lang="scss">body { color: red; }</style><style lang="pcss">body { color: red; }</style>`; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal( metaRanges.styles, [ { position: { start: 7, end: 27, }, type: 'tag', 
content: 'body { color: red; }', lang: 'css', }, { position: { start: 47, end: 60, }, type: 'style-attribute', content: 'color: blue;', lang: 'css', }, { position: { start: 86, end: 106, }, type: 'tag', content: 'body { color: red; }', lang: 'scss', }, { position: { start: 133, end: 153, }, type: 'tag', content: 'body { color: red; }', lang: 'pcss', }, ], 'expected metaRanges.styles to match snapshot' ); }); test('extract scripts and styles with multibyte characters', async () => { const scripts = "<script>console.log('🦄')</script><script>console.log('Hey');</script>"; const styles = "<style>body { background: url('🦄.png'); }</style><style>body { background: url('Hey'); }</style>"; const input = `${scripts}${styles}`; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal( metaRanges.scripts, [ { position: { start: 8, end: 25, }, type: 'processed-module', content: "console.log('🦄')", lang: '', }, { position: { start: 42, end: 61, }, type: 'processed-module', content: "console.log('Hey');", lang: '', }, ], 'expected metaRanges.scripts to match snapshot' ); assert.equal( metaRanges.styles, [ { position: { start: 77, end: 112, }, type: 'tag', content: "body { background: url('🦄.png'); }", lang: 'css', }, { position: { start: 127, end: 159, }, type: 'tag', content: "body { background: url('Hey'); }", lang: 'css', }, ], 'expected metaRanges.styles to match snapshot' ); }); test('extract scripts with multibyte characters II', async () => { // Emojis with various byte lengths (in order, 4, 3, 8, 28) and newlines, a complicated case, if you will const input = `🀄✂🇸🇪👩🏻‍❤️‍👩🏽<script> console.log("🀄✂🇸🇪👩🏻‍❤️‍👩🏽"); </script>🀄✂🇸🇪👩🏻‍❤️‍👩🏽<div onload="console.log('🀄✂🇸🇪👩🏻‍❤️‍👩🏽')"></div>`; const { metaRanges } = await convertToTSX(input, { sourcemap: 'external' }); assert.equal( metaRanges.scripts, [ { position: { start: 27, end: 65, }, type: 'processed-module', content: '\n\tconsole.log("🀄✂🇸🇪👩🏻‍❤️‍👩🏽");\n', lang: '', }, { position: { start: 106, 
end: 141, }, type: 'event-attribute', content: "console.log('🀄✂🇸🇪👩🏻‍❤️‍👩🏽')", lang: '', }, ], 'expected metaRanges.scripts to match snapshot' ); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/nested-generics.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('handles plain aliases', async () => { const input = `--- interface LocalImageProps {} type Props = LocalImageProps; ---`; const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.ok(output.code.includes('(_props: Props)'), 'Includes aliased Props as correct props'); }); test('handles aliases with nested generics', async () => { const input = `--- interface LocalImageProps { src: Promise<{ default: string }>; } type Props = LocalImageProps; ---`; const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.ok(output.code.includes('(_props: Props)'), 'Includes aliased Props as correct props'); }); test('gracefully handles Image props', async () => { const input = `--- interface LocalImageProps extends Omit<HTMLAttributes, 'src' | 'width' | 'height'>, Omit<TransformOptions, 'src'>, Pick<astroHTML.JSX.ImgHTMLAttributes, 'loading' | 'decoding'> { src: ImageMetadata | Promise<{ default: ImageMetadata }>; /** Defines an alternative text description of the image. Set to an empty string (alt="") if the image is not a key part of the content (it's decoration or a tracking pixel). */ alt: string; sizes: HTMLImageElement['sizes']; widths: number[]; formats?: OutputFormat[]; } interface RemoteImageProps extends Omit<HTMLAttributes, 'src' | 'width' | 'height'>, TransformOptions, Pick<ImgHTMLAttributes, 'loading' | 'decoding'> { src: string; /** Defines an alternative text description of the image. 
Set to an empty string (alt="") if the image is not a key part of the content (it's decoration or a tracking pixel). */ alt: string; sizes: HTMLImageElement['sizes']; widths: number[]; aspectRatio: TransformOptions['aspectRatio']; formats?: OutputFormat[]; background: TransformOptions['background']; } export type Props = LocalImageProps | RemoteImageProps; ---`; const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.ok(output.code.includes('(_props: Props)'), 'Includes aliased Props as correct props'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/non-latin.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; // https://mathiasbynens.be/notes/javascript-identifiers const value = ` // Let's goooooo 🚀🚀🚀 // How convenient! var π = Math.PI; // Sometimes, you just have to use the Bad Parts of JavaScript: var ಠ_ಠ = eval; // Code, Y U NO WORK?! var ლ_ಠ益ಠ_ლ = 42; // How about a JavaScript library for functional programming? var λ = function() {}; // Obfuscate boring variable names for great justice var \u006C\u006F\u006C\u0077\u0061\u0074 = 'heh'; // …or just make up random ones var Ꙭൽↈⴱ = 'huh'; // Did you know about the [.] syntax? 
var ᱹ = 1; console.assert([1, 2, 3][ᱹ] === 2); // While perfectly valid, this doesn’t work in most browsers: var foo\u200Cbar = 42; // This is *not* a bitwise left shift (\`<<\`): var 〱〱 = 2; // This is, though: 〱〱 << 〱〱; // 8 // Give yourself a discount: var price_9̶9̶_89 = 'cheap'; // Fun with Roman numerals var Ⅳ = 4; var Ⅴ = 5; Ⅳ + Ⅴ; // 9 // Cthulhu was here var Hͫ̆̒̐ͣ̊̄ͯ͗͏̵̗̻̰̠̬͝ͅE̴̷̬͎̱̘͇͍̾ͦ͊͒͊̓̓̐_̫̠̱̩̭̤͈̑̎̋ͮͩ̒͑̾͋͘Ç̳͕̯̭̱̲̣̠̜͋̍O̴̦̗̯̹̼ͭ̐ͨ̊̈͘͠M̶̝̠̭̭̤̻͓͑̓̊ͣͤ̎͟͠E̢̞̮̹͍̞̳̣ͣͪ͐̈T̡̯̳̭̜̠͕͌̈́̽̿ͤ̿̅̑Ḧ̱̱̺̰̳̹̘̰́̏ͪ̂̽͂̀͠ = 'Zalgo';`; test('non-latin characters', async () => { const input = ` --- ${value} --- <div></div> `; const output = `${TSXPrefix} ${value} <Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/props-and-getStaticPaths.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; function getPrefix({ props = `ASTRO__MergeUnion<ASTRO__Get<ASTRO__InferredGetStaticPath, 'props'>>`, component = '__AstroComponent_', params = `ASTRO__Get<ASTRO__InferredGetStaticPath, 'params'>`, }: { props?: string; component?: string; params?: string; } = {}) { return `/** * Astro global available in all contexts in .astro files * * [Astro documentation](https://docs.astro.build/reference/api-reference/#astro-global) */ declare const Astro: Readonly<import('astro').AstroGlobal<${props}, typeof ${component}${params ? `, ${params}` : ''}>>`; } function getSuffix() { return `type ASTRO__ArrayElement<ArrayType extends readonly unknown[]> = ArrayType extends readonly (infer ElementType)[] ? 
ElementType : never; type ASTRO__Flattened<T> = T extends Array<infer U> ? ASTRO__Flattened<U> : T; type ASTRO__InferredGetStaticPath = ASTRO__Flattened<ASTRO__ArrayElement<Awaited<ReturnType<typeof getStaticPaths>>>>; type ASTRO__MergeUnion<T, K extends PropertyKey = T extends unknown ? keyof T : never> = T extends unknown ? T & { [P in Exclude<K, keyof T>]?: never } extends infer O ? { [P in keyof O]: O[P] } : never : never; type ASTRO__Get<T, K> = T extends undefined ? undefined : K extends keyof T ? T[K] : never;`; } test('explicit props definition', async () => { const input = `--- interface Props {}; export function getStaticPaths() { return {}; } --- <div></div>`; const output = `${TSXPrefix}\ninterface Props {}; export function getStaticPaths() { return {}; } {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${getSuffix()} ${getPrefix({ props: 'Props' })}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('inferred props', async () => { const input = `--- export function getStaticPaths() { return {}; } --- <div></div>`; const output = `${TSXPrefix}\nexport function getStaticPaths() { return {}; } {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: ASTRO__MergeUnion<ASTRO__Get<ASTRO__InferredGetStaticPath, 'props'>>): any {} ${getSuffix()} ${getPrefix()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/props.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; const PREFIX = (component = '__AstroComponent_') => `/** * Astro global 
available in all contexts in .astro files * * [Astro documentation](https://docs.astro.build/reference/api-reference/#astro-global) */ declare const Astro: Readonly<import('astro').AstroGlobal<Props, typeof ${component}>>`; test('no props', async () => { const input = '<div></div>'; const output = `${TSXPrefix}<Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('nested Props', async () => { const input = `--- function DoTheThing(Props) {} ---`; const output = `${TSXPrefix} function DoTheThing(Props) {} export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props interface', async () => { const input = ` --- interface Props {} --- <div></div> `; const output = `${TSXPrefix} interface Props {} {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props import', async () => { const input = ` --- import { Props } from './somewhere'; --- <div></div> `; const output = `${TSXPrefix} import { Props } from './somewhere'; <Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props alias', async () => { const input = ` --- import { MyComponent as Props } from './somewhere'; --- <div></div> `; const output = `${TSXPrefix} import { MyComponent as Props } from './somewhere'; <Fragment> 
<div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props type import', async () => { const input = ` --- import type { Props } from './somewhere'; --- <div></div> `; const output = `${TSXPrefix} import type { Props } from './somewhere'; <Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props type', async () => { const input = ` --- type Props = {} --- <div></div> `; const output = `${TSXPrefix} type Props = {} {};<Fragment> <div></div> </Fragment> export default function Test__AstroComponent_(_props: Props): any {} ${PREFIX('Test__AstroComponent_')}`; const { code } = await convertToTSX(input, { filename: '/Users/nmoo/test.astro', sourcemap: 'external', }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props generic (simple)', async () => { const input = ` --- interface Props<T> {} --- <div></div> `; const output = `${TSXPrefix} interface Props<T> {} {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_<T>(_props: Props<T>): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props generic (complex)', async () => { const input = ` --- interface Props<T extends Other<{ [key: string]: any }>> {} --- <div></div> `; const output = `${TSXPrefix} interface Props<T extends Other<{ [key: string]: any }>> {} {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_<T extends Other<{ [key: string]: any }>>(_props: Props<T>): any {} ${PREFIX()}`; const { code } = await 
convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props generic (very complex)', async () => { const input = ` --- interface Props<T extends { [key: string]: any }, P extends string ? { [key: string]: any }: never> {} --- <div></div> `; const output = `${TSXPrefix} interface Props<T extends { [key: string]: any }, P extends string ? { [key: string]: any }: never> {} {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_<T extends { [key: string]: any }, P extends string ? { [key: string]: any }: never>(_props: Props<T, P>): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props generic (very complex II)', async () => { const input = ` --- interface Props<T extends Something<false> ? A : B, P extends string ? { [key: string]: any }: never> {} --- <div></div> `; const output = `${TSXPrefix} interface Props<T extends Something<false> ? A : B, P extends string ? { [key: string]: any }: never> {} {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_<T extends Something<false> ? A : B, P extends string ? 
{ [key: string]: any }: never>(_props: Props<T, P>): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('polymorphic props', async () => { const input = ` --- interface Props<Tag extends keyof JSX.IntrinsicElements> extends HTMLAttributes<Tag> { as?: Tag; } --- <div></div> `; const output = `${TSXPrefix} interface Props<Tag extends keyof JSX.IntrinsicElements> extends HTMLAttributes<Tag> { as?: Tag; } {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_<Tag extends keyof JSX.IntrinsicElements>(_props: Props<Tag>): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('unrelated prop import', async () => { const input = ` --- import SvelteOptionalProps from './SvelteOptionalProps.svelte'; --- <SvelteOptionalProps /> `; const output = `${TSXPrefix} import SvelteOptionalProps from './SvelteOptionalProps.svelte'; <Fragment> <SvelteOptionalProps /> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('unrelated sibling prop', async () => { const input = `--- import type { Props as ComponentBProps } from './ComponentB.astro' --- <div /> `; const output = `${TSXPrefix} import type { Props as ComponentBProps } from './ComponentB.astro' {};<Fragment> <div /> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props interface with as property', async () => { const input = `--- interface Props { as?: string; href?: string; } --- <div></div> `; const 
output = `${TSXPrefix} interface Props { as?: string; href?: string; } {};<Fragment> <div></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props with destructured as property', async () => { const input = `--- interface Props { as?: string; className?: string; } const { as, className } = Astro.props; --- <div class={className}>{as}</div> `; const output = `${TSXPrefix} interface Props { as?: string; className?: string; } const { as, className } = Astro.props; <Fragment> <div class={className}>{as}</div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props with renamed as property in destructuring', async () => { const input = `--- interface Props { as?: string; } const { as: element } = Astro.props; --- <div>{element}</div> `; const output = `${TSXPrefix} interface Props { as?: string; } const { as: element } = Astro.props; <Fragment> <div>{element}</div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props interface with as and other properties', async () => { const input = `--- interface Props extends HTMLAttributes<'div'> { as?: keyof HTMLElementTagNameMap; variant?: 'primary' | 'secondary'; size?: 'sm' | 'md' | 'lg'; } const { as = 'div', variant = 'primary', size = 'md', ...rest } = Astro.props; --- <div data-variant={variant} data-size={size}></div> `; const output = `${TSXPrefix} interface Props extends HTMLAttributes<'div'> { as?: keyof HTMLElementTagNameMap; variant?: 'primary' | 
'secondary'; size?: 'sm' | 'md' | 'lg'; } const { as = 'div', variant = 'primary', size = 'md', ...rest } = Astro.props; <Fragment> <div data-variant={variant} data-size={size}></div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('props type alias with as property', async () => { const input = `--- type Props = { as?: string; children?: any; } const props = Astro.props as Props; --- <div>{props.children}</div> `; const output = `${TSXPrefix} type Props = { as?: string; children?: any; } const props = Astro.props as Props; <Fragment> <div>{props.children}</div> </Fragment> export default function __AstroComponent_(_props: Props): any {} ${PREFIX()}`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/raw.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('style is raw', async () => { const input = '<style>div { color: red; }</style>'; const output = `${TSXPrefix}<Fragment> <style>{\`div { color: red; }\`}</style> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('is:raw is raw', async () => { const input = '<div is:raw>A{B}C</div>'; const output = `${TSXPrefix}<Fragment> <div is:raw>{\`A{B}C\`}</div> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 
'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/script.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('script function', async () => { const input = `<script type="module">console.log({ test: \`literal\` })</script>`; const output = `${TSXPrefix}<Fragment> <script type="module"> {() => {console.log({ test: \`literal\` })}} </script> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('partytown function', async () => { const input = `<script type="text/partytown">console.log({ test: \`literal\` })</script>`; const output = `${TSXPrefix}<Fragment> <script type="text/partytown"> {() => {console.log({ test: \`literal\` })}} </script> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('ld+json wrapping', async () => { const input = `<script type="application/ld+json">{"a":"b"}</script>`; const output = `${TSXPrefix}<Fragment> <script type="application/ld+json">{\`{"a":"b"}\`}</script> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('escape unknown types', async () => { const input = `<script type="text/somethigndf" is:inline>console.log("something");</script>`; const output = `${TSXPrefix}<Fragment> <script 
type="text/somethigndf" is:inline>{\`console.log("something");\`}</script> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test("don't include scripts if disabled", async () => { const input = ` <script>hello;</script> <script type="module">hello;</script> <script type="text/partytown">hello;</script> <script type="application/ld+json">hello;</script> <script type="text/somethigndf" is:inline>hello;</script>`; const output = `${TSXPrefix}<Fragment> <script></script> <script type="module"></script> <script type="text/partytown"></script> <script type="application/ld+json"></script> <script type="text/somethigndf" is:inline></script> </Fragment> export default function __AstroComponent_(_props: Record<string, any>): any {}\n`; const { code } = await convertToTSX(input, { sourcemap: 'external', includeScripts: false }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx/top-level-returns.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { TSXPrefix } from '../utils.js'; test('transforms top-level returns to throw statements', async () => { const input = `--- if (something) { return Astro.redirect(); } function thatDoesSomething() { return "Hey"; } class Component { render() { return "wow"! } } ---`; const output = `${TSXPrefix} if (something) { throw Astro.redirect(); } function thatDoesSomething() { return "Hey"; } class Component { render() { return "wow"! 
} } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves returns inside arrow functions', async () => { const input = `--- const foo = () => { return "value"; } if (condition) { return Astro.redirect("/login"); } ---`; const output = `${TSXPrefix} const foo = () => { return "value"; } if (condition) { throw Astro.redirect("/login"); } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('preserves returns inside object methods', async () => { const input = `--- const something = { someFunction: () => { return "Hello World"; }, someOtherFunction() { return "Hello World"; }, }; if (true) { return Astro.redirect(); } ---`; const output = `${TSXPrefix} const something = { someFunction: () => { return "Hello World"; }, someOtherFunction() { return "Hello World"; }, }; if (true) { throw Astro.redirect(); } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('handles multiple top-level returns', async () => { const input = `--- if (condition1) { return Astro.redirect("/a"); } if (condition2) { return Astro.redirect("/b"); } ---`; const output = `${TSXPrefix} if (condition1) { throw Astro.redirect("/a"); } if (condition2) { throw Astro.redirect("/b"); } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('no transformation when no top-level returns', async () => { const 
input = `--- function foo() { return "bar"; } const arrow = () => { return "baz"; } ---`; const output = `${TSXPrefix} function foo() { return "bar"; } const arrow = () => { return "baz"; } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('handles TypeScript syntax without losing returns', async () => { const input = `--- type Response = { status: number }; const handler = (input: string): Response => { return { status: 200 }; }; const value = (foo as string); if (value) { return Astro.redirect('/ok'); } ---`; const output = `${TSXPrefix} type Response = { status: number }; const handler = (input: string): Response => { return { status: 200 }; }; const value = (foo as string); if (value) { throw Astro.redirect('/ok'); } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('does not transform returns in nested class or object methods', async () => { const input = `--- class Foo { method() { return 'foo'; } arrow = () => { return 'bar'; } } const obj = { method() { return 'baz'; }, }; if (true) { return Astro.redirect('/nested'); } ---`; const output = `${TSXPrefix} class Foo { method() { return 'foo'; } arrow = () => { return 'bar'; } } const obj = { method() { return 'baz'; }, }; if (true) { throw Astro.redirect('/nested'); } export default function __AstroComponent_(_props: Record<string, any>): any {} `; const { code } = await convertToTSX(input, { sourcemap: 'external' }); assert.snapshot(code, output, 'expected code to match snapshot'); }); test('handles computed methods and generic arrows', async () => { const input = `--- class Foo { ['get']() { return 'ok'; } static ['load']() { return 'static'; } } const obj = 
{ ['get']() { return 'obj'; }, }; const generic = <T,>(value: T) => { return value; }; if (true) { return Astro.redirect('/computed'); } ---`;
  // Only the top-level `return` may become a `throw`; returns inside the
  // class methods, object methods, and the generic arrow must be preserved.
  const output = `${TSXPrefix} class Foo { ['get']() { return 'ok'; } static ['load']() { return 'static'; } } const obj = { ['get']() { return 'obj'; }, }; const generic = <T,>(value: T) => { return value; }; if (true) { throw Astro.redirect('/computed'); } export default function __AstroComponent_(_props: Record<string, any>): any {} `;
  const { code } = await convertToTSX(input, { sourcemap: 'external' });
  assert.snapshot(code, output, 'expected code to match snapshot');
});

test('handles satisfies and as const in top-level conditionals', async () => {
  // `as const` / `satisfies` expressions must not confuse the scanner into
  // missing the top-level return that follows them.
  const input = `--- const config = { flag: true, } as const; const map = { name: 'astro', } satisfies Record<string, string>; if (config.flag) { return Astro.redirect('/satisfies'); } ---`;
  const output = `${TSXPrefix} const config = { flag: true, } as const; const map = { name: 'astro', } satisfies Record<string, string>; if (config.flag) { throw Astro.redirect('/satisfies'); } export default function __AstroComponent_(_props: Record<string, any>): any {} `;
  const { code } = await convertToTSX(input, { sourcemap: 'external' });
  assert.snapshot(code, output, 'expected code to match snapshot');
});

test('handles type-position arrows without missing top-level returns', async () => {
  // Arrow syntax appearing only inside type aliases must not be mistaken
  // for a function body that would swallow the top-level return.
  const input = `--- type Fn = () => void; type Factory = (value: string) => { ok: boolean }; if (true) { return Astro.redirect('/types'); } ---`;
  const output = `${TSXPrefix} type Fn = () => void; type Factory = (value: string) => { ok: boolean }; if (true) { throw Astro.redirect('/types'); } export default function __AstroComponent_(_props: Record<string, any>): any {} `;
  const { code } = await convertToTSX(input, { sourcemap: 'external' });
  assert.snapshot(code, output, 'expected code to match snapshot');
});

test.run();

================================================
FILE:
packages/compiler/test/tsx-errors/eof.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { TSXResult } from '../../types.js'; const FIXTURE = `<html> <head> <title>Hello world</title> </head> <body> <div> {/* </div> </body> </html>`; let result: TSXResult; test.before(async () => { result = await convertToTSX(FIXTURE, { filename: '/src/components/EOF.astro', }); }); test('got a tokenizer error', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is(result.diagnostics[0].text, 'Unterminated comment'); assert.is(FIXTURE.split('\n')[result.diagnostics[0].location.line - 1], ' {/*'); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-errors/fragment-shorthand.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { TSXResult } from '../../types.js'; const FIXTURE = `<html> <head> <title>Hello world</title> </head> <body> < data-test="hello"><div></div></> </body> </html>`; let result: TSXResult; test.before(async () => { result = await convertToTSX(FIXTURE, { filename: '/src/components/fragment.astro', }); }); test('got a tokenizer error', () => { assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 1); assert.is( result.diagnostics[0].text, 'Unable to assign attributes when using <> Fragment shorthand syntax!' 
); const loc = result.diagnostics[0].location; assert.is(FIXTURE.split('\n')[loc.line - 1], ` < data-test="hello"><div></div></>`); assert.is( FIXTURE.split('\n')[loc.line - 1].slice(loc.column - 1, loc.column - 1 + loc.length), `< data-test="hello">` ); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-errors/unfinished-component.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import type { TSXResult } from '../../types.js'; const FIXTURE = '<div class={'; let result: TSXResult; test.before(async () => { result = await convertToTSX(FIXTURE, { filename: '/src/components/unfinished.astro', }); }); test('did not crash on unfinished component', () => { assert.ok(result); assert.ok(Array.isArray(result.diagnostics)); assert.is(result.diagnostics.length, 0); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/404.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('404 generates a valid identifier', async () => { const input = '<div {name} />'; const output = await convertToTSX(input, { filename: '404.astro', sourcemap: 'inline' }); assert.match(output.code, 'export default function __AstroComponent_'); }); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/attributes.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('shorthand attribute', async () => { const input = '<div {name} />'; const output = await testTSXSourcemap(input, 'name'); assert.equal(output, { source: 'index.astro', line: 1, column: 6, name: null, }); }); test('empty quoted attribute', async () => { 
const input = `<div src="" />`; const open = await testTSXSourcemap(input, '"'); assert.equal(open, { source: 'index.astro', line: 1, column: 9, name: null, }); }); test('template literal attribute', async () => { const input = `--- --- <Tag src=\`bar\${foo}\` />`; const open = await testTSXSourcemap(input, 'foo'); assert.equal(open, { source: 'index.astro', line: 3, column: 16, name: null, }); }); test('multiline quoted attribute', async () => { const input = `<path d="M 0 C100 0 Z" />`; const output = await testTSXSourcemap(input, 'Z'); assert.equal(output, { source: 'index.astro', line: 3, column: 1, name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/deprecated.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('script is:inline', async () => { const input = `--- /** @deprecated */ const deprecated = "Astro" deprecated; const hello = "Astro" --- `; const output = await testTSXSourcemap(input, 'deprecated;'); assert.equal(output, { line: 4, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/error.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('svelte error', async () => { const input = `--- import SvelteOptionalProps from "./SvelteOptionalProps.svelte" --- <SvelteOptionalProps></SvelteOptionalProps>`; const output = await testTSXSourcemap(input, '<SvelteOptionalProps>'); assert.equal(output, { line: 5, column: 1, source: 'index.astro', name: null, }); }); test('vue error', async () => { const input = `--- import SvelteError from "./SvelteError.svelte" import VueError from "./VueError.vue" --- <SvelteError></SvelteError> <VueError></VueError>`; 
const svelte = await testTSXSourcemap(input, '<SvelteError>'); assert.equal(svelte, { line: 6, column: 1, source: 'index.astro', name: null, }); const vue = await testTSXSourcemap(input, '<VueError>'); assert.equal(vue, { line: 7, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/frontmatter.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('frontmatter', async () => { const input = `--- nonexistent --- `; const output = await testTSXSourcemap(input, 'nonexistent'); assert.equal(output, { line: 2, column: 1, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/hover.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; const fixture = `--- const MyVariable = "Astro" /** Documentation */ const MyDocumentedVariable = "Astro" /** @author Astro */ const MyJSDocVariable = "Astro" --- `; test('hover I', async () => { const input = fixture; const output = await testTSXSourcemap(input, 'MyVariable'); assert.equal(output, { line: 2, column: 11, source: 'index.astro', name: null, }); }); test('hover II', async () => { const input = fixture; const output = await testTSXSourcemap(input, 'MyDocumentedVariable'); assert.equal(output, { line: 5, column: 11, source: 'index.astro', name: null, }); }); test('hover III', async () => { const input = fixture; const output = await testTSXSourcemap(input, 'MyJSDocVariable'); assert.equal(output, { line: 8, column: 11, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/module.ts 
================================================
import { test } from 'uvu';
import * as assert from 'uvu/assert';
import { testTSXSourcemap } from '../utils.js';

test('script is:inline', async () => {
	// `import { baz } from './script';` must sit on line 8 for the assertion.
	const input = `---
// valid
import { foo } from './script.js';
import ComponentAstro from './astro.astro';
import ComponentSvelte from './svelte.svelte';
import ComponentVue from './vue.vue';
// invalid
import { baz } from './script';
foo;baz;ComponentAstro;ComponentSvelte;ComponentVue;
---
`;
	const output = await testTSXSourcemap(input, `'./script'`);
	assert.equal(output, {
		line: 8,
		column: 23,
		source: 'index.astro',
		name: null,
	});
});

test.run();

================================================
FILE: packages/compiler/test/tsx-sourcemaps/multibyte.ts
================================================
import { test } from 'uvu';
import * as assert from 'uvu/assert';
import { testTSXSourcemap } from '../utils.js';

test('multibyte content', async () => {
	const input = '<h1>ツ</h1>';
	const output = await testTSXSourcemap(input, 'ツ');
	assert.equal(output, {
		source: 'index.astro',
		line: 1,
		column: 4,
		name: null,
	});
});

test('content after multibyte character', async () => {
	const input = '<h1>ツ</h1><p>foobar</p>';
	const output = await testTSXSourcemap(input, 'foobar');
	assert.equal(output, {
		source: 'index.astro',
		line: 1,
		column: 13,
		name: null,
	});
});

// FIX(review): the next two tests shared the identical title 'many characters',
// which makes uvu output ambiguous; suffixes disambiguate, behavior unchanged.
test('many characters (ん)', async () => {
	const input = '<h1>こんにちは</h1>';
	const output = await testTSXSourcemap(input, 'ん');
	assert.equal(output, {
		source: 'index.astro',
		line: 1,
		column: 5,
		name: null,
	});
});

test('many characters (に)', async () => {
	const input = '<h1>こんにちは</h1>';
	const output = await testTSXSourcemap(input, 'に');
	assert.equal(output, {
		source: 'index.astro',
		line: 1,
		column: 6,
		name: null,
	});
});

test.run();

================================================
FILE: packages/compiler/test/tsx-sourcemaps/script.ts
================================================
import { test } from 'uvu';
import * as assert from
'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('script is:inline', async () => { const input = `<script is:inline> const MyNumber = 3; console.log(MyNumber.toStrang()); </script> `; const output = await testTSXSourcemap(input, '\n'); assert.equal(output, { line: 1, column: 18, source: 'index.astro', name: null, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/tags.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { TraceMap, generatedPositionFor } from '@jridgewell/trace-mapping'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('tag close', async () => { const input = '<Hello></Hello>'; const output = await testTSXSourcemap(input, '>'); assert.equal(output, { line: 1, column: 6, source: 'index.astro', name: null, }); }); test('tag with spaces', async () => { const input = '<Button ></Button>'; const { map } = await convertToTSX(input, { sourcemap: 'both', filename: 'index.astro' }); const tracer = new TraceMap(map as any); const generated = generatedPositionFor(tracer, { source: 'index.astro', line: 1, column: 14 }); assert.equal(generated, { line: 4, column: 9, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/template-windows.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('last character does not end up in middle of CRLF', async () => { const input = "---\r\nimport { Meta } from '$lib/components/Meta.astro';\r\n---\r\n"; const output = await testTSXSourcemap(input, ';'); assert.equal(output, { source: 'index.astro', line: 2, column: 50, name: null, }); }); test('template expression basic', async () => { const input = 
'<div>{\r\nnonexistent\r\n}</div>';
	const output = await testTSXSourcemap(input, 'nonexistent');
	assert.equal(output, {
		source: 'index.astro',
		line: 2,
		column: 1,
		name: null,
	});
});

test('template expression has dot', async () => {
	const input = '<div>{\nconsole.log(hey)\n}</div>';
	const output = await testTSXSourcemap(input, 'log');
	assert.equal(output, {
		source: 'index.astro',
		line: 2,
		column: 9,
		name: null,
	});
});

// FIX(review): this file repeats each scenario for LF and CRLF line endings but
// reused the exact same test title three times; '(CRLF)' suffixes disambiguate
// uvu's output without changing any behavior.
test('template expression has dot (CRLF)', async () => {
	const input = '<div>{\r\nconsole.log(hey)\r\n}</div>';
	const output = await testTSXSourcemap(input, 'log');
	assert.equal(output, {
		source: 'index.astro',
		line: 2,
		column: 9,
		name: null,
	});
});

test('template expression with addition', async () => {
	const input = `{"hello" + \nhey}`;
	const output = await testTSXSourcemap(input, 'hey');
	assert.equal(output, {
		source: 'index.astro',
		line: 2,
		column: 1,
		name: null,
	});
});

test('template expression with addition (CRLF)', async () => {
	const input = `{"hello" + \r\nhey}`;
	const output = await testTSXSourcemap(input, 'hey');
	assert.equal(output, {
		source: 'index.astro',
		line: 2,
		column: 1,
		name: null,
	});
});

test('html attribute', async () => {
	const input = `<svg\nvalue="foo" color="#000"></svg>`;
	const output = await testTSXSourcemap(input, 'color');
	assert.equal(output, {
		source: 'index.astro',
		name: null,
		line: 2,
		column: 12,
	});
});

test('html attribute (CRLF)', async () => {
	const input = `<svg\r\nvalue="foo" color="#000"></svg>`;
	const output = await testTSXSourcemap(input, 'color');
	assert.equal(output, {
		source: 'index.astro',
		name: null,
		line: 2,
		column: 12,
	});
});

test('complex template expression', async () => {
	const input = `{[].map(ITEM => {\r\nv = "what";\r\nreturn <div>{ITEMS}</div>\r\n})}`;
	const item = await testTSXSourcemap(input, 'ITEM');
	const items = await testTSXSourcemap(input, 'ITEMS');
	assert.equal(item, {
		source: 'index.astro',
		name: null,
		line: 1,
		column: 8,
	});
	assert.equal(items, {
		source: 'index.astro',
		name: null,
		line: 3,
		column: 14,
}); }); test('attributes', async () => { const input = `<div\r\na="b" className="hello" />`; const className = await testTSXSourcemap(input, 'className'); assert.equal(className, { source: 'index.astro', name: null, line: 2, column: 6, }); }); test('special attributes', async () => { const input = `<div\r\na="b" @on.click="fn" />`; const onClick = await testTSXSourcemap(input, '@on.click'); assert.equal(onClick, { source: 'index.astro', name: null, line: 2, column: 6, }); }); test('whitespace', async () => { const input = `---\r\nimport A from "a";\r\n\timport B from "b";\r\n---\r\n`; const { code } = await convertToTSX(input, { sourcemap: 'both', filename: 'index.astro' }); assert.match(code, '\t', 'output includes \\t'); const B = await testTSXSourcemap(input, 'B'); assert.equal(B, { source: 'index.astro', name: null, line: 3, column: 9, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/template.ts ================================================ import { test } from 'uvu'; import * as assert from 'uvu/assert'; import { testTSXSourcemap } from '../utils.js'; test('template expression basic', async () => { const input = '<div>{nonexistent}</div>'; const output = await testTSXSourcemap(input, 'nonexistent'); assert.equal(output, { source: 'index.astro', line: 1, column: 6, name: null, }); }); test('template expression has dot', async () => { const input = '<div>{console.log(hey)}</div>'; const output = await testTSXSourcemap(input, 'log'); assert.equal(output, { source: 'index.astro', line: 1, column: 14, name: null, }); }); test('template expression with addition', async () => { const input = `{"hello" + hey}`; const output = await testTSXSourcemap(input, 'hey'); assert.equal(output, { source: 'index.astro', line: 1, column: 11, name: null, }); }); test('html attribute', async () => { const input = `<svg color="#000"></svg>`; const output = await testTSXSourcemap(input, 'color'); assert.equal(output, { 
source: 'index.astro', name: null, line: 1, column: 5, }); }); test('complex template expression', async () => { const input = `{[].map(ITEM => { v = "what"; return <div>{ITEMS}</div> })}`; const item = await testTSXSourcemap(input, 'ITEM'); const items = await testTSXSourcemap(input, 'ITEMS'); assert.equal(item, { source: 'index.astro', name: null, line: 1, column: 8, }); assert.equal(items, { source: 'index.astro', name: null, line: 3, column: 14, }); }); test('attributes', async () => { const input = `<div className="hello" />`; const className = await testTSXSourcemap(input, 'className'); assert.equal(className, { source: 'index.astro', name: null, line: 1, column: 5, }); }); test('special attributes', async () => { const input = `<div @on.click="fn" />`; const onClick = await testTSXSourcemap(input, '@on.click'); assert.equal(onClick, { source: 'index.astro', name: null, line: 1, column: 5, }); }); test.run(); ================================================ FILE: packages/compiler/test/tsx-sourcemaps/unfinished-literal.ts ================================================ import { convertToTSX } from '@astrojs/compiler'; import { test } from 'uvu'; import * as assert from 'uvu/assert'; test('does not panic on unfinished template literal attribute', async () => { const input = `<div class=\`></div> `; let error = 0; try { const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.match(output.code, 'class={``}'); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should not have panicked'); }); test('does not panic on unfinished double quoted attribute', async () => { const input = `<main id="gotcha />`; let error = 0; try { const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.match(output.code, `id="gotcha"`); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should not have panicked'); }); test('does not panic on unfinished single quoted attribute', async 
() => { const input = `<main id='gotcha/>`; let error = 0; try { const output = await convertToTSX(input, { filename: 'index.astro', sourcemap: 'inline' }); assert.match(output.code, `id="gotcha"`); } catch (e) { error = 1; } assert.equal(error, 0, 'compiler should not have panicked'); }); ================================================ FILE: packages/compiler/test/utils.ts ================================================ import { convertToTSX, transform } from '@astrojs/compiler'; import { TraceMap, generatedPositionFor, originalPositionFor } from '@jridgewell/trace-mapping'; import sass from 'sass'; export async function preprocessStyle(value: any, attrs: any): Promise<any> { if (!attrs.lang) { return null; } if (attrs.lang === 'scss') { return transformSass(value); } return null; } export function transformSass(value: string) { return new Promise((resolve, reject) => { sass.render({ data: value }, (err, result) => { if (err) { reject(err); return; } resolve({ code: result.css.toString('utf8'), map: result.map }); return; }); }); } export function getPositionFor(input: string, snippet: string) { let index = 0; let line = 0; let column = 0; for (const c of input) { if (c === snippet[0] && input.slice(index).startsWith(snippet)) { return { line: line + 1, column }; } if (c === '\n') { line++; column = 0; } column++; index++; } return null; } export async function testTSXSourcemap(input: string, snippet: string) { const snippetLoc = getPositionFor(input, snippet); if (!snippetLoc) throw new Error(`Unable to find "${snippet}"`); const { code, map } = await convertToTSX(input, { sourcemap: 'both', filename: 'index.astro' }); const tracer = new TraceMap(map as any); const generated = generatedPositionFor(tracer, { source: 'index.astro', line: snippetLoc.line, column: snippetLoc.column, }); if (!generated || generated.line === null) { console.log(code); throw new Error(`"${snippet}" position incorrectly mapped in generated output.`); } const originalPosition = 
originalPositionFor(tracer, { line: generated.line, column: generated.column, }); return originalPosition; } export async function testJSSourcemap(input: string, snippet: string) { const snippetLoc = getPositionFor(input, snippet); if (!snippetLoc) throw new Error(`Unable to find "${snippet}"`); const { code, map } = await transform(input, { sourcemap: 'both', filename: 'index.astro', resolvePath: (i: string) => i, }); const tracer = new TraceMap(map); const generated = generatedPositionFor(tracer, { source: 'index.astro', line: snippetLoc.line, column: snippetLoc.column, }); if (!generated || generated.line === null) { console.log(code); throw new Error(`"${snippet}" position incorrectly mapped in generated output.`); } const originalPosition = originalPositionFor(tracer, { line: generated.line, column: generated.column, }); return originalPosition; } export const TSXPrefix = '/* @jsxImportSource astro */\n\n'; ================================================ FILE: packages/compiler/tsconfig.json ================================================ { "compilerOptions": { "target": "ES2020", "module": "Node16", "moduleResolution": "Node16", "strict": true, "noEmit": true, "declaration": true, "noImplicitOverride": true, "noUnusedLocals": true, "esModuleInterop": true, "verbatimModuleSyntax": true }, "exclude": ["node_modules"] } ================================================ FILE: packages/compiler/tsup.config.ts ================================================ import { defineConfig } from 'tsup'; export default defineConfig((options) => ({ entry: ['src/node/**', 'src/browser/**', 'src/shared/**'], outDir: 'dist', format: ['cjs', 'esm'], dts: true, clean: true, minify: !options.watch, sourcemap: Boolean(options.watch), watch: options.watch, publicDir: 'wasm', shims: true, })); ================================================ FILE: packages/compiler/types.d.ts ================================================ export type * from './dist/shared/types.js'; 
================================================
FILE: packages/compiler/utils.d.ts
================================================
export * from './dist/node/utils.js';

================================================
FILE: pnpm-workspace.yaml
================================================
packages:
  - 'packages/*'