Add iptable-parser as submodule

This commit is contained in:
infidel
2023-10-30 14:40:43 +07:00
parent 64c9e9c11b
commit be27c0882e
35887 changed files with 2661509 additions and 69 deletions

View File

@@ -0,0 +1,2 @@
from . import base_component # noqa:F401
from . import component_loader # noqa:F401

View File

@@ -0,0 +1,105 @@
# keyword.kwlist for both Python 2 and 3 — includes py2-only "exec"/"print"
# and py3-only "async"/"await"/"nonlocal" so generated props never shadow
# a keyword under either interpreter.
python_keywords = set(
    """
    False None True and as assert async await break class continue def del
    elif else except exec finally for from global if import in is lambda
    nonlocal not or pass print raise return try while with yield
    """.split()
)
# This is a set of R reserved words that cannot be used as function
# argument names.
#
# Reserved words can be obtained from R's help pages by executing the
# statement below:
# > ?reserved
r_keywords = set(
    """
    if else repeat while function for in next break TRUE FALSE NULL Inf NaN
    NA NA_integer_ NA_real_ NA_complex_ NA_character_ ...
    """.split()
)
# This is a set of Julia reserved words that cannot be used as function
# argument names.
julia_keywords = set(
    """
    baremodule begin break catch const continue do else elseif end export
    false finally for function global if import let local macro module quote
    return struct true try using while
    """.split()
)

View File

@@ -0,0 +1,78 @@
def is_node(value):
    """True when a react-docgen type name renders dash component children."""
    return value == "node" or value == "element"
def is_shape(value):
    """True when a react-docgen type name is a PropTypes shape/exact."""
    return value == "shape" or value == "exact"
def collect_array(a_value, base, nodes):
    """Collect child-node paths from an ``arrayOf`` element type.

    Appends ``base`` itself when the elements are nodes; otherwise recurses
    into shape/union/objectOf element types with ``base + "[]"`` marking
    the array level in the path.
    """
    element_kind = a_value["name"]
    array_path = base + "[]"
    if element_kind in ("node", "element"):
        nodes.append(base)
    elif element_kind in ("shape", "exact"):
        nodes = collect_nodes(a_value["value"], array_path, nodes)
    elif element_kind == "union":
        nodes = collect_union(a_value["value"], array_path, nodes)
    elif element_kind == "objectOf":
        nodes = collect_object(a_value["value"], array_path, nodes)
    return nodes
def collect_union(type_list, base, nodes):
    """Collect child-node paths from every member of a ``union`` type."""
    for member in type_list:
        member_kind = member["name"]
        if member_kind in ("node", "element"):
            nodes.append(base)
        elif member_kind in ("shape", "exact"):
            nodes = collect_nodes(member["value"], base, nodes)
        elif member_kind == "arrayOf":
            nodes = collect_array(member["value"], base, nodes)
        elif member_kind == "objectOf":
            nodes = collect_object(member["value"], base, nodes)
    return nodes
def collect_object(o_value, base, nodes):
    """Collect child-node paths from an ``objectOf`` value type.

    The ``{}`` suffix marks the dict level in the path.
    """
    value_kind = o_value.get("name")
    object_path = base + "{}"
    if value_kind in ("node", "element"):
        nodes.append(object_path)
    elif value_kind in ("shape", "exact"):
        nodes = collect_nodes(o_value.get("value", {}), object_path, nodes)
    elif value_kind == "union":
        nodes = collect_union(o_value.get("value"), object_path, nodes)
    elif value_kind == "arrayOf":
        nodes = collect_array(o_value, object_path, nodes)
    return nodes
def collect_nodes(metadata, base="", nodes=None):
    """Walk a react-docgen props dict and return dotted paths of every prop
    (at any nesting depth) whose type can hold dash component children.

    ``[]`` and ``{}`` in a path mark array and dict levels respectively.
    """
    if not nodes:
        nodes = []
    for prop_name, value in metadata.items():
        # Support for recursive shapes: the type may sit directly on the
        # field instead of under a "type" key.
        type_info = value.get("type", value)
        kind = type_info.get("name")
        key = f"{base}.{prop_name}" if base else prop_name
        if kind in ("node", "element"):
            nodes.append(key)
        elif kind == "arrayOf":
            nodes = collect_array(type_info.get("value", type_info), key, nodes)
        elif kind in ("shape", "exact"):
            nodes = collect_nodes(type_info["value"], key, nodes)
        elif kind == "union":
            nodes = collect_union(type_info["value"], key, nodes)
        elif kind == "objectOf":
            nodes = collect_object(type_info.get("value", {}), key, nodes)
    return nodes
def filter_base_nodes(nodes):
    """Keep only top-level node paths, i.e. those carrying no nesting
    markers ('[]', '.', '{}')."""
    markers = ("[]", ".", "{}")
    base_nodes = []
    for node in nodes:
        if not any(marker in node for marker in markers):
            base_nodes.append(node)
    return base_nodes

View File

@@ -0,0 +1,548 @@
# pylint: disable=consider-using-f-string
import copy
import os
import shutil
import warnings
import sys
import importlib
import uuid
import hashlib
from ._all_keywords import julia_keywords
from ._py_components_generation import reorder_props
# uuid of DashBase Julia package.
jl_dash_base_uuid = "03207cf0-e2b3-4b91-9ca8-690cf0fb507e"
# uuid of Dash Julia package. Used as base for component package uuid
jl_dash_uuid = "1b08a953-4be3-4667-9a23-3db579824955"
# Declaring longer string templates as globals to improve
# readability, make method logic clearer to anyone inspecting
# code below
# NOTE(review): leading whitespace inside the template strings below looks
# stripped in this copy of the file — confirm against upstream if the
# formatting of the generated Julia/TOML output matters.

# Per-component Julia function definition (filled by generate_class_string).
jl_component_string = '''
export {funcname}
"""
{funcname}(;kwargs...){children_signatures}
{docstring}
"""
function {funcname}(; kwargs...)
available_props = Symbol[{component_props}]
wild_props = Symbol[{wildcard_symbols}]
return Component("{funcname}", "{element_name}", "{module_name}", available_props, wild_props; kwargs...)
end
{children_definitions}
''' # noqa:E501

# Extra call signatures listed in the docstring when the component
# accepts children.
jl_children_signatures = """
{funcname}(children::Any;kwargs...)
{funcname}(children_maker::Function;kwargs...)
"""

# Convenience methods forwarding positional children / a children-maker
# closure into the keyword form.
jl_children_definitions = """
{funcname}(children::Any; kwargs...) = {funcname}(;kwargs..., children = children)
{funcname}(children_maker::Function; kwargs...) = {funcname}(children_maker(); kwargs...)
"""

# Top-level module file of the generated Julia package; registers the
# package's JS/CSS resources with DashBase on load.
jl_package_file_string = """
module {package_name}
using {base_package}
const resources_path = realpath(joinpath( @__DIR__, "..", "deps"))
const version = "{version}"
{component_includes}
function __init__()
DashBase.register_package(
DashBase.ResourcePkg(
"{project_shortname}",
resources_path,
version = version,
[
{resources_dist}
]
)
)
end
end
"""

# Project.toml template; {authors} is either empty or a full
# 'authors = [...]' line followed by a newline.
jl_projecttoml_string = """
name = "{package_name}"
uuid = "{package_uuid}"
{authors}version = "{version}"
[deps]
{base_package} = "{dash_uuid}"
[compat]
julia = "1.2"
{base_package} = "{base_version}"
"""

# Supported [compat] version ranges for each possible base package.
jl_base_version = {
    "Dash": "0.1.3, 1.0",
    "DashBase": "0.1",
}

# include() line for one generated component source file under src/jl/.
jl_component_include_string = 'include("jl/{name}.jl")'

# One DashBase.Resource(...) entry (filled by generate_metadata_strings).
jl_resource_tuple_string = """DashBase.Resource(
relative_package_path = {relative_package_path},
external_url = {external_url},
dynamic = {dynamic},
async = {async_string},
type = :{type}
)"""

# Packages that ship inside Dash itself; they depend on DashBase rather
# than on the full Dash Julia package.
core_packages = ["dash_html_components", "dash_core_components", "dash_table"]
def jl_package_name(namestring):
    """Convert a snake_case project name into a Julia PackageName."""
    parts = namestring.split("_")
    return "".join(part.capitalize() for part in parts)
def stringify_wildcards(wclist, no_symbol=False):
    """Render wildcard prefixes for the Julia templates.

    With ``no_symbol=True`` produce a '|'-joined plain string
    ("data-|aria-"); otherwise produce comma-separated Julia Symbol
    literals ('Symbol("data-"), Symbol("aria-")').
    """
    if no_symbol:
        return "|".join("{}-".format(item) for item in wclist)
    return ", ".join('Symbol("{}-")'.format(item) for item in wclist)
def get_wildcards_jl(props):
    """Return prefixes of wildcard props, e.g. "data-*" -> "data"."""
    wildcards = []
    for key in props:
        if key.endswith("-*"):
            wildcards.append(key.replace("-*", ""))
    return wildcards
def get_jl_prop_types(type_object):
    """Mapping from the PropTypes js type object to the Julia type."""

    def shape_or_exact():
        # Nested shape/exact: list the keys, then document each key's type
        # one indent level deeper via create_prop_docstring_jl.
        return "lists containing elements {}.\n{}".format(
            ", ".join("'{}'".format(t) for t in type_object["value"]),
            "Those elements have the following types:\n{}".format(
                "\n".join(
                    create_prop_docstring_jl(
                        prop_name=prop_name,
                        type_object=prop,
                        required=prop["required"],
                        description=prop.get("description", ""),
                        indent_num=1,
                    )
                    for prop_name, prop in type_object["value"].items()
                )
            ),
        )

    # Keys are react-docgen type names; values are thunks so only the
    # requested conversion is evaluated (several read type_object["value"],
    # which not every type object carries).
    return dict(
        array=lambda: "Array",
        bool=lambda: "Bool",
        number=lambda: "Real",
        string=lambda: "String",
        object=lambda: "Dict",
        any=lambda: "Bool | Real | String | Dict | Array",
        element=lambda: "dash component",
        node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOf
        enum=lambda: "a value equal to: {}".format(
            ", ".join("{}".format(str(t["value"])) for t in type_object["value"])
        ),
        # React's PropTypes.oneOfType
        union=lambda: "{}".format(
            " | ".join(
                "{}".format(get_jl_type(subType))
                for subType in type_object["value"]
                if get_jl_type(subType) != ""
            )
        ),
        # React's PropTypes.arrayOf
        arrayOf=lambda: (
            "Array"
            + (
                " of {}s".format(get_jl_type(type_object["value"]))
                if get_jl_type(type_object["value"]) != ""
                else ""
            )
        ),
        # React's PropTypes.objectOf
        objectOf=lambda: "Dict with Strings as keys and values of type {}".format(
            get_jl_type(type_object["value"])
        ),
        # React's PropTypes.shape
        shape=shape_or_exact,
        # React's PropTypes.exact
        exact=shape_or_exact,
    )
def filter_props(props):
    """Return a deep copy of ``props`` without entries that cannot become
    component arguments.

    Excluded are props lacking both a "type" and a "flowType" field, and
    props typed 'func', 'symbol' or 'instanceOf'. The input dict is never
    mutated.
    """
    result = copy.deepcopy(props)
    for name in list(result):
        meta = result[name]
        if "type" not in meta and "flowType" not in meta:
            result.pop(name)
            continue
        # Functions and instances cannot be passed from the host language.
        if "type" in meta:  # PropTypes metadata
            if meta["type"]["name"] in {"func", "symbol", "instanceOf"}:
                result.pop(name)
        elif "flowType" in meta:  # Flow metadata is shaped differently
            if meta["flowType"]["name"] == "signature":
                # Same as the PropTypes filter above, but "func" hides
                # under flowType["type"] when the name is "signature".
                if meta["flowType"].get("type") != "object":
                    result.pop(name)
        else:
            raise ValueError  # unreachable: guarded by the first check
    return result
def get_jl_type(type_object):
    """
    Convert JS types to Julia types for the component definition

    Parameters
    ----------
    type_object: dict
        react-docgen-generated prop type dictionary

    Returns
    -------
    str
        Julia type string; "" when the JS type has no Julia mapping
    """
    converters = get_jl_prop_types(type_object=type_object)
    # dict.get (not try/except) so a KeyError raised *inside* a converter
    # still propagates instead of being mistaken for an unknown type.
    converter = converters.get(type_object["name"])
    if converter is None:
        return ""
    return converter()
def print_jl_type(typedata):
    """Return the capitalized Julia type followed by ". ", or "" when the
    type has no Julia mapping."""
    rendered = get_jl_type(typedata).capitalize()
    return rendered + ". " if rendered else rendered
def create_docstring_jl(component_name, props, description):
    """Create the Dash component docstring.
    Parameters
    ----------
    component_name: str
        Component name
    props: dict
        Dictionary with {propName: propMetadata} structure
    description: str
        Component description
    Returns
    -------
    str
        Dash component docstring
    """
    # Ensure props are ordered with children first
    props = reorder_props(props=props)
    return "A{n} {name} component.\n{description}\nKeyword arguments:\n{args}".format(
        # article agreement: "An Alert ..." vs "A Graph ..."
        n="n" if component_name[0].lower() in "aeiou" else "",
        name=component_name,
        description=description,
        args="\n".join(
            create_prop_docstring_jl(
                prop_name=p,
                # PropTypes metadata lives under "type", Flow under "flowType"
                type_object=prop["type"] if "type" in prop else prop["flowType"],
                required=prop["required"],
                description=prop["description"],
                indent_num=0,
            )
            for p, prop in filter_props(props).items()
        ),
    )
def create_prop_docstring_jl(
    prop_name,
    type_object,
    required,
    description,
    indent_num,
):
    """
    Create the Dash component prop docstring
    Parameters
    ----------
    prop_name: str
        Name of the Dash component prop
    type_object: dict
        react-docgen-generated prop type dictionary
    required: bool
        Component is required?
    description: str
        Dash component description
    indent_num: int
        Number of indents to use for the context block
        (creates 2 spaces for every indent)
    Returns
    -------
    str
        Dash component prop docstring
    """
    jl_type_name = get_jl_type(type_object=type_object)
    indent_spacing = " " * indent_num
    # Multi-line types (nested shape/exact) get the long form with the
    # type description pushed into a trailing clause.
    if "\n" in jl_type_name:
        return (
            "{indent_spacing}- `{name}` ({is_required}): {description}. "
            "{name} has the following type: {type}".format(
                indent_spacing=indent_spacing,
                name=prop_name,
                type=jl_type_name,
                description=description,
                is_required="required" if required else "optional",
            )
        )
    return "{indent_spacing}- `{name}` ({type}{is_required}){description}".format(
        indent_spacing=indent_spacing,
        name=prop_name,
        type="{}; ".format(jl_type_name) if jl_type_name else "",
        description=(": {}".format(description) if description != "" else ""),
        is_required="required" if required else "optional",
    )
# Blank Julia prefixes may be passed to dash-generate-components; the
# resulting function names are always lower-cased, and a supplied prefix
# is kept verbatim.
def format_fn_name(prefix, name):
    """Return the Julia function name for a component: lower-cased, with
    ``prefix_`` prepended when a prefix is given."""
    lowered = name.lower()
    if not prefix:
        return lowered
    return "{}_{}".format(prefix, lowered)
def generate_metadata_strings(resources, metatype):
    """Render each resource dict as a DashBase.Resource(...) Julia literal
    of the given metatype ("js" or "css")."""

    def nothing_or_string(v):
        # Julia `nothing` for missing/empty values, a quoted string otherwise.
        return '"{}"'.format(v) if v else "nothing"

    rendered = []
    for resource in resources:
        if "async" in resource:
            async_string = ":{}".format(str(resource.get("async")).lower())
        else:
            async_string = "nothing"
        rendered.append(
            jl_resource_tuple_string.format(
                relative_package_path=nothing_or_string(
                    resource.get("relative_package_path", "")
                ),
                external_url=nothing_or_string(resource.get("external_url", "")),
                dynamic=str(resource.get("dynamic", "nothing")).lower(),
                type=metatype,
                async_string=async_string,
            )
        )
    return rendered
def is_core_package(project_shortname):
    """True for the legacy core component packages bundled with Dash."""
    return any(project_shortname == pkg for pkg in core_packages)
def base_package_name(project_shortname):
    """Julia base package to depend on: DashBase for core packages,
    Dash for everything else."""
    if is_core_package(project_shortname):
        return "DashBase"
    return "Dash"
def base_package_uid(project_shortname):
    """UUID of the Julia base package chosen by base_package_name()."""
    if is_core_package(project_shortname):
        return jl_dash_base_uuid
    return jl_dash_uuid
def generate_package_file(project_shortname, components, pkg_data, prefix):
    """Write src/<PackageName>.jl, the top-level module of the generated
    Julia package: one include() per component plus the resource
    registrations read from the built Python package.
    """
    package_name = jl_package_name(project_shortname)
    # Import the freshly built Python package from the current directory
    # to read its JS/CSS resource metadata (_js_dist/_css_dist).
    sys.path.insert(0, os.getcwd())
    mod = importlib.import_module(project_shortname)
    js_dist = getattr(mod, "_js_dist", [])
    css_dist = getattr(mod, "_css_dist", [])
    project_ver = pkg_data.get("version")
    resources_dist = ",\n".join(
        generate_metadata_strings(js_dist, "js")
        + generate_metadata_strings(css_dist, "css")
    )
    package_string = jl_package_file_string.format(
        package_name=package_name,
        component_includes="\n".join(
            [
                jl_component_include_string.format(
                    name=format_fn_name(prefix, comp_name)
                )
                for comp_name in components
            ]
        ),
        resources_dist=resources_dist,
        version=project_ver,
        project_shortname=project_shortname,
        base_package=base_package_name(project_shortname),
    )
    file_path = os.path.join("src", package_name + ".jl")
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(package_string)
    print("Generated {}".format(file_path))
def generate_toml_file(project_shortname, pkg_data):
    """Write Project.toml for the generated Julia package.

    The package UUID reuses the Dash Julia UUID with its last 12 hex
    digits replaced by the tail of the MD5 of the package name, so the
    same package always regenerates the same UUID.
    """
    package_author = pkg_data.get("author", "")
    project_ver = pkg_data.get("version")
    package_name = jl_package_name(project_shortname)
    u = uuid.UUID(jl_dash_uuid)
    package_uuid = uuid.UUID(
        hex=u.hex[:-12] + hashlib.md5(package_name.encode("utf-8")).hexdigest()[-12:]
    )
    # The authors line is omitted entirely when no author is known.
    authors_string = (
        'authors = ["{}"]\n'.format(package_author) if package_author else ""
    )
    base_package = base_package_name(project_shortname)
    toml_string = jl_projecttoml_string.format(
        package_name=package_name,
        package_uuid=package_uuid,
        version=project_ver,
        authors=authors_string,
        base_package=base_package,
        base_version=jl_base_version[base_package],
        dash_uuid=base_package_uid(project_shortname),
    )
    file_path = "Project.toml"
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(toml_string)
    print("Generated {}".format(file_path))
def generate_class_string(name, props, description, project_shortname, prefix):
    """Build the Julia source string for one component function."""
    # Ensure props are ordered with children first.
    filtered_props = reorder_props(filter_props(props))
    docstring = (
        create_docstring_jl(
            component_name=name, props=filtered_props, description=description
        )
        .replace("\r\n", "\n")
        .replace("$", "\\$")  # bare $ would interpolate inside Julia strings
    )
    wclist = get_wildcards_jl(props)
    # Keep only props that can be Julia keyword arguments: drop wildcard
    # props, setProps, and Julia reserved words (with a warning).
    kept_props = []
    for item in list(filtered_props.keys()):
        if item.endswith("-*") or item == "setProps":
            continue
        if item in julia_keywords:
            warnings.warn(
                (
                    'WARNING: prop "{}" in component "{}" is a Julia keyword'
                    " - REMOVED FROM THE JULIA COMPONENT"
                ).format(item, name)
            )
            continue
        kept_props.append(item)
    default_paramtext = ", ".join(":{}".format(p) for p in kept_props)
    funcname = format_fn_name(prefix, name)
    children_signatures = ""
    children_definitions = ""
    if "children" in kept_props:
        children_signatures = jl_children_signatures.format(funcname=funcname)
        children_definitions = jl_children_definitions.format(funcname=funcname)
    return jl_component_string.format(
        funcname=funcname,
        docstring=docstring,
        component_props=default_paramtext,
        wildcard_symbols=stringify_wildcards(wclist, no_symbol=False),
        wildcard_names=stringify_wildcards(wclist, no_symbol=True),
        element_name=name,
        module_name=project_shortname,
        children_signatures=children_signatures,
        children_definitions=children_definitions,
    )
def generate_struct_file(name, props, description, project_shortname, prefix):
    """Write src/jl/<funcname>.jl containing one generated component
    function, creating the src/jl directory if needed."""
    props = reorder_props(props=props)
    import_string = "# AUTO GENERATED FILE - DO NOT EDIT\n"
    class_string = generate_class_string(
        name, props, description, project_shortname, prefix
    )
    file_name = format_fn_name(prefix, name) + ".jl"
    # put component files in src/jl subdir,
    # this also creates the Julia source directory for the package
    # if it is missing
    if not os.path.exists("src/jl"):
        os.makedirs("src/jl")
    file_path = os.path.join("src", "jl", file_name)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(import_string)
        f.write(class_string)
    print("Generated {}".format(file_name))
# pylint: disable=unused-argument
def generate_module(
    project_shortname, components, metadata, pkg_data, prefix, **kwargs
):
    """Entry point for Julia generation: copy static JS/CSS assets into
    deps/ and write the package module file and Project.toml."""
    # copy over all JS dependencies from the (Python) components dir
    # the inst/lib directory for the package won't exist on first call
    # create this directory if it is missing
    if os.path.exists("deps"):
        shutil.rmtree("deps")
    os.makedirs("deps")
    for rel_dirname, _, filenames in os.walk(project_shortname):
        for filename in filenames:
            extension = os.path.splitext(filename)[1]
            # skip Python sources and metadata; only static assets ship
            if extension in [".py", ".pyc", ".json"]:
                continue
            target_dirname = os.path.join(
                "deps/", os.path.relpath(rel_dirname, project_shortname)
            )
            if not os.path.exists(target_dirname):
                os.makedirs(target_dirname)
            shutil.copy(os.path.join(rel_dirname, filename), target_dirname)
    generate_package_file(project_shortname, components, pkg_data, prefix)
    generate_toml_file(project_shortname, pkg_data)

View File

@@ -0,0 +1,691 @@
from collections import OrderedDict
import copy
import os
import re
from textwrap import fill, dedent

from dash.development.base_component import _explicitize_args
from dash.exceptions import NonExistentEventException

from ._all_keywords import python_keywords
from ._collect_nodes import collect_nodes, filter_base_nodes
from .base_component import Component
# pylint: disable=unused-argument,too-many-locals
def generate_class_string(
    typename,
    props,
    description,
    namespace,
    prop_reorder_exceptions=None,
    max_props=None,
):
    """Dynamically generate class strings to have nicely formatted docstrings,
    keyword arguments, and repr.
    Inspired by http://jameso.be/2013/08/06/namedtuple.html
    Parameters
    ----------
    typename
    props
    description
    namespace
    prop_reorder_exceptions
    Returns
    -------
    string
    """
    # TODO _prop_names, _type, _namespace, and available_properties
    # can be modified by a Dash JS developer via setattr
    # TODO - Tab out the repr for the repr of these components to make it
    # look more like a hierarchical tree
    # TODO - Include "description" "defaultValue" in the repr and docstring
    #
    # TODO - Handle "required"
    #
    # TODO - How to handle user-given `null` values? I want to include
    # an expanded docstring like Dropdown(value=None, id=None)
    # but by templating in those None values, I have no way of knowing
    # whether a property is None because the user explicitly wanted
    # it to be `null` or whether that was just the default value.
    # The solution might be to deal with default values better although
    # not all component authors will supply those.

    # Template of the generated class. Indentation inside this literal is
    # significant: it becomes the generated class body.
    # NOTE(review): that indentation looks stripped in this copy of the
    # file and was restored here — confirm against upstream.
    c = '''class {typename}(Component):
    """{docstring}"""
    _children_props = {children_props}
    _base_nodes = {base_nodes}
    _namespace = '{namespace}'
    _type = '{typename}'
    @_explicitize_args
    def __init__(self, {default_argtext}):
        self._prop_names = {list_of_valid_keys}
        self._valid_wildcard_attributes =\
            {list_of_valid_wildcard_attr_prefixes}
        self.available_properties = {list_of_valid_keys}
        self.available_wildcard_properties =\
            {list_of_valid_wildcard_attr_prefixes}
        _explicit_args = kwargs.pop('_explicit_args')
        _locals = locals()
        _locals.update(kwargs)  # For wildcard attrs and excess named props
        args = {args}
{required_validation}
        super({typename}, self).__init__({argtext})
'''
    # Reordering is skipped for components named in prop_reorder_exceptions
    # (or for all components when the list contains "ALL").
    filtered_props = (
        filter_props(props)
        if (prop_reorder_exceptions is not None and typename in prop_reorder_exceptions)
        or (prop_reorder_exceptions is not None and "ALL" in prop_reorder_exceptions)
        else reorder_props(filter_props(props))
    )
    wildcard_prefixes = repr(parse_wildcards(props))
    list_of_valid_keys = repr(list(map(str, filtered_props.keys())))
    docstring = create_docstring(
        component_name=typename,
        props=filtered_props,
        description=description,
        prop_reorder_exceptions=prop_reorder_exceptions,
    ).replace("\r\n", "\n")
    required_args = required_props(filtered_props)
    is_children_required = "children" in required_args
    required_args = [arg for arg in required_args if arg != "children"]
    prohibit_events(props)
    # pylint: disable=unused-variable
    prop_keys = list(props.keys())
    # NOTE: list_of_valid_keys is a repr() string, so the second clause is
    # a substring test, not a list-membership test.
    if "children" in props and "children" in list_of_valid_keys:
        prop_keys.remove("children")
        default_argtext = "children=None, "
        args = "{k: _locals[k] for k in _explicit_args if k != 'children'}"
        argtext = "children=children, **args"
    else:
        default_argtext = ""
        args = "{k: _locals[k] for k in _explicit_args}"
        argtext = "**args"
    # Runtime check emitted into __init__ for required (non-children) props.
    if len(required_args) == 0:
        required_validation = ""
    else:
        required_validation = f"""
        for k in {required_args}:
            if k not in args:
                raise TypeError(
                    'Required argument `' + k + '` was not specified.')
        """
    if is_children_required:
        required_validation += """
        if 'children' not in _explicit_args:
            raise TypeError('Required argument children was not specified.')
        """
    # Constructor signature: every non-wildcard, non-keyword prop gets a
    # Component.REQUIRED/UNDEFINED sentinel default.
    default_arglist = [
        (
            f"{p:s}=Component.REQUIRED"
            if props[p]["required"]
            else f"{p:s}=Component.UNDEFINED"
        )
        for p in prop_keys
        if not p.endswith("-*") and p not in python_keywords and p != "setProps"
    ]
    if max_props:
        final_max_props = max_props - (1 if "children" in props else 0)
        if len(default_arglist) > final_max_props:
            default_arglist = default_arglist[:final_max_props]
            docstring += (
                "\n\n"
                "Note: due to the large number of props for this component,\n"
                "not all of them appear in the constructor signature, but\n"
                "they may still be used as keyword arguments."
            )
    default_argtext += ", ".join(default_arglist + ["**kwargs"])
    nodes = collect_nodes({k: v for k, v in props.items() if k != "children"})
    return dedent(
        c.format(
            typename=typename,
            namespace=namespace,
            filtered_props=filtered_props,
            list_of_valid_wildcard_attr_prefixes=wildcard_prefixes,
            list_of_valid_keys=list_of_valid_keys,
            docstring=docstring,
            default_argtext=default_argtext,
            args=args,
            argtext=argtext,
            required_validation=required_validation,
            children_props=nodes,
            base_nodes=filter_base_nodes(nodes) + ["children"],
        )
    )
def generate_class_file(
    typename,
    props,
    description,
    namespace,
    prop_reorder_exceptions=None,
    max_props=None,
):
    """Generate a Python class file (.py) given a class string.
    Parameters
    ----------
    typename
    props
    description
    namespace
    prop_reorder_exceptions
    Returns
    -------
    """
    # Header pinned to every generated file; the import supplies the names
    # referenced by the generated class body.
    import_string = (
        "# AUTO GENERATED FILE - DO NOT EDIT\n\n"
        + "from dash.development.base_component import "
        + "Component, _explicitize_args\n\n\n"
    )
    class_string = generate_class_string(
        typename, props, description, namespace, prop_reorder_exceptions, max_props
    )
    file_name = f"{typename:s}.py"
    file_path = os.path.join(namespace, file_name)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(import_string)
        f.write(class_string)
    print(f"Generated {file_name}")
def generate_imports(project_shortname, components):
    """Write <project_shortname>/_imports_.py, importing every generated
    component class and declaring __all__."""
    component_imports = "\n".join(f"from .{x} import {x}" for x in components)
    all_list = ",\n".join(f' "{x}"' for x in components)
    imports_string = f"{component_imports}\n\n__all__ = [\n{all_list}\n]"
    target = os.path.join(project_shortname, "_imports_.py")
    with open(target, "w", encoding="utf-8") as f:
        f.write(imports_string)
def generate_classes_files(project_shortname, metadata, *component_generators):
    """Run every generator once per component in ``metadata``.

    Component names are derived from the metadata paths
    ("src/lib/Foo.react.js" -> "Foo"). Returns the list of names.
    """
    component_names = []
    for component_path, component_data in metadata.items():
        component_name = component_path.split("/")[-1].split(".")[0]
        component_names.append(component_name)
        for make_component in component_generators:
            make_component(
                component_name,
                component_data["props"],
                component_data["description"],
                project_shortname,
            )
    return component_names
def generate_class(
    typename, props, description, namespace, prop_reorder_exceptions=None
):
    """Generate a Python class object given a class string.
    Parameters
    ----------
    typename
    props
    description
    namespace
    Returns
    -------
    """
    class_source = generate_class_string(
        typename, props, description, namespace, prop_reorder_exceptions
    )
    # Execute the generated source in a scope that provides the two names
    # the class body references, then pull the new class out of it.
    scope = {"Component": Component, "_explicitize_args": _explicitize_args}
    # pylint: disable=exec-used
    exec(class_source, scope)
    return scope[typename]
def required_props(props):
    """Return the names of props whose metadata marks them required.
    Parameters
    ----------
    props: dict
    Returns
    -------
    list
        List of prop names (str) that are required for the Component
    """
    return [name for name, meta in props.items() if meta["required"]]
def create_docstring(component_name, props, description, prop_reorder_exceptions=None):
    """Create the Dash component docstring.
    Parameters
    ----------
    component_name: str
        Component name
    props: dict
        Dictionary with {propName: propMetadata} structure
    description: str
        Component description
    Returns
    -------
    str
        Dash component docstring
    """
    # Ensure props are ordered with children first — unless this component
    # (or "ALL") is listed in prop_reorder_exceptions.
    props = (
        props
        if (
            prop_reorder_exceptions is not None
            and component_name in prop_reorder_exceptions
        )
        or (prop_reorder_exceptions is not None and "ALL" in prop_reorder_exceptions)
        else reorder_props(props)
    )
    # Article agreement: "An Alert ..." vs "A Graph ..."
    n = "n" if component_name[0].lower() in "aeiou" else ""
    args = "\n".join(
        create_prop_docstring(
            prop_name=p,
            # PropTypes metadata lives under "type", Flow under "flowType"
            type_object=prop["type"] if "type" in prop else prop["flowType"],
            required=prop["required"],
            description=prop["description"],
            default=prop.get("defaultValue"),
            indent_num=0,
            is_flow_type="flowType" in prop and "type" not in prop,
        )
        for p, prop in filter_props(props).items()
    )
    return (
        f"A{n} {component_name} component.\n{description}\n\nKeyword arguments:\n{args}"
    )
def prohibit_events(props):
    """Raise NonExistentEventException when deprecated event props appear.

    Events have been removed from dash; dashEvents/fireEvents props are
    rejected outright.
    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure
    """
    for legacy_prop in ("dashEvents", "fireEvents"):
        if legacy_prop in props:
            raise NonExistentEventException(
                "Events are no longer supported by dash. Use properties instead, "
                "eg `n_clicks` instead of a `click` event."
            )
def parse_wildcards(props):
    """Pull out the wildcard attributes from the Component props.
    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure
    Returns
    -------
    list
        List of Dash valid wildcard prefixes
    """
    # Only these two wildcard families exist; strip the trailing "*".
    return [attr[:-1] for attr in ("data-*", "aria-*") if attr in props]
def reorder_props(props):
    """If "children" is in props, then move it to the front to respect dash
    convention, then 'id', then the remaining props sorted by prop name.

    Returns a new OrderedDict; the caller's dict is not mutated.
    """
    # OrderedDict built with duplicate keys keeps the FIRST position but
    # the LAST value, so these placeholder entries only pin the ordering.
    leading = []
    if "children" in props:
        leading.append(("children", ""))
    if "id" in props:
        leading.append(("id", ""))
    return OrderedDict(leading + sorted(props.items()))
def filter_props(props):
    """Return a deep copy of ``props`` without entries that cannot become
    component arguments.

    Excluded are props lacking both a "type" and a "flowType" field, and
    props typed 'func', 'symbol' or 'instanceOf' (these cannot be passed
    from Python). The input dict is never mutated.

    Example: given ``{'p1': {'type': {'name': 'bool'}}, 'p2':
    {'description': 'no type'}, 'p3': {'type': {'name': 'func'}}}`` only
    ``p1`` survives.
    """
    result = copy.deepcopy(props)
    for name in list(result):
        meta = result[name]
        if "type" not in meta and "flowType" not in meta:
            result.pop(name)
            continue
        # Filter out functions and instances --
        # these cannot be passed from Python
        if "type" in meta:  # PropTypes metadata
            if meta["type"]["name"] in {"func", "symbol", "instanceOf"}:
                result.pop(name)
        elif "flowType" in meta:  # Flow metadata is shaped differently
            if meta["flowType"]["name"] == "signature":
                # Same as the PropTypes filter above, but "func" hides
                # under flowType["type"] when the name is "signature".
                if meta["flowType"].get("type") != "object":
                    result.pop(name)
        else:
            raise ValueError  # unreachable: guarded by the first check
    return result
def fix_keywords(txt):
    """Replace the JavaScript literals ``true``/``false``/``null`` with the
    Python spellings ``True``/``False``/``None`` in docstring text.

    Only whole words are replaced: the previous bare ``str.replace``
    corrupted words that merely contain a keyword (e.g. "construed" ->
    "consTrued", "nullable" -> "Noneable").
    """
    fix_word = {"true": "True", "false": "False", "null": "None"}
    for js_keyword, python_keyword in fix_word.items():
        # \b keeps replacements word-bounded; "_" is a word character, so
        # identifiers like is_true are also preserved.
        txt = re.sub(r"\b{}\b".format(js_keyword), python_keyword, txt)
    return txt
# pylint: disable=too-many-arguments
# pylint: disable=too-many-locals
def create_prop_docstring(
    prop_name,
    type_object,
    required,
    description,
    default,
    indent_num,
    is_flow_type=False,
):
    """Create the Dash component prop docstring.
    Parameters
    ----------
    prop_name: str
        Name of the Dash component prop
    type_object: dict
        react-docgen-generated prop type dictionary
    required: bool
        Component is required?
    description: str
        Dash component description
    default: dict
        Either None if a default value is not defined, or
        dict containing the key 'value' that defines a
        default value for the prop
    indent_num: int
        Number of indents to use for the context block
        (creates 2 spaces for every indent)
    is_flow_type: bool
        Does the prop use Flow types? Otherwise, uses PropTypes
    Returns
    -------
    str
        Dash component prop docstring
    """
    py_type_name = js_to_py_type(
        type_object=type_object, is_flow_type=is_flow_type, indent_num=indent_num
    )
    indent_spacing = " " * indent_num
    # react-docgen default metadata is a dict holding a "value" string.
    default = default["value"] if default else ""
    default = fix_keywords(default)
    is_required = "optional"
    if required:
        is_required = "required"
    elif default and default not in ["None", "{}", "[]"]:
        # Non-trivial defaults are surfaced instead of "optional".
        is_required = "default " + default.replace("\n", "")
    # formats description
    period = "." if description else ""
    description = description.strip().strip(".").replace('"', r"\"") + period
    desc_indent = indent_spacing + " "
    description = fill(
        description,
        initial_indent=desc_indent,
        subsequent_indent=desc_indent,
        break_long_words=False,
        break_on_hyphens=False,
    )
    description = f"\n{description}" if description else ""
    colon = ":" if description else ""
    description = fix_keywords(description)
    # Multi-line type names come from nested shape/exact types and get the
    # long "…has the following type…" rendering.
    if "\n" in py_type_name:
        # corrects the type
        dict_or_list = "list of dicts" if py_type_name.startswith("list") else "dict"
        # format and rewrite the intro to the nested dicts
        intro1, intro2, dict_descr = py_type_name.partition("with keys:")
        intro = f"`{prop_name}` is a {intro1}{intro2}"
        intro = fill(
            intro,
            initial_indent=desc_indent,
            subsequent_indent=desc_indent,
            break_long_words=False,
            break_on_hyphens=False,
        )
        # captures optional nested dict description and puts the "or" condition on a new line
        if "| dict with keys:" in dict_descr:
            dict_part1, dict_part2 = dict_descr.split(" |", 1)
            dict_part2 = "".join([desc_indent, "Or", dict_part2])
            dict_descr = f"{dict_part1}\n\n {dict_part2}"
        # ensures indent is correct if there is a second nested list of dicts
        current_indent = dict_descr.lstrip("\n").find("-")
        if current_indent == len(indent_spacing):
            dict_descr = "".join(
                "\n\n " + line for line in dict_descr.splitlines() if line != ""
            )
        return (
            f"\n{indent_spacing}- {prop_name} ({dict_or_list}; {is_required}){colon}"
            f"{description}"
            f"\n\n{intro}{dict_descr}"
        )
    tn = f"{py_type_name}; " if py_type_name else ""
    return f"\n{indent_spacing}- {prop_name} ({tn}{is_required}){colon}{description}"
def map_js_to_py_types_prop_types(type_object, indent_num):
    """Mapping from the PropTypes js type object to the Python type.

    Returns a dict of zero-argument callables keyed by the PropTypes type
    name; ``js_to_py_type`` looks up the right one and invokes it to get
    the human-readable Python type string.
    """

    def shape_or_exact():
        # Shared handler for PropTypes.shape and PropTypes.exact:
        # document every key (alphabetically) as a nested prop docstring.
        return "dict with keys:\n" + "\n".join(
            create_prop_docstring(
                prop_name=prop_name,
                type_object=prop,
                required=prop["required"],
                description=prop.get("description", ""),
                default=prop.get("defaultValue"),
                indent_num=indent_num + 2,
            )
            for prop_name, prop in sorted(list(type_object["value"].items()))
        )

    def array_of():
        # Pluralize the inner type: "list of strings", "list of dicts ...".
        inner = js_to_py_type(type_object["value"])
        if inner:
            return "list of " + (
                inner + "s"
                if inner.split(" ")[0] != "dict"
                else inner.replace("dict", "dicts", 1)
            )
        return "list"

    def tuple_of():
        elements = [js_to_py_type(element) for element in type_object["elements"]]
        return f"list of {len(elements)} elements: [{', '.join(elements)}]"

    return dict(
        array=lambda: "list",
        bool=lambda: "boolean",
        number=lambda: "number",
        string=lambda: "string",
        object=lambda: "dict",
        any=lambda: "boolean | number | string | dict | list",
        element=lambda: "dash component",
        node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOf
        enum=lambda: (
            "a value equal to: "
            + ", ".join(str(t["value"]) for t in type_object["value"])
        ),
        # React's PropTypes.oneOfType
        union=lambda: " | ".join(
            js_to_py_type(subType)
            for subType in type_object["value"]
            if js_to_py_type(subType) != ""
        ),
        # React's PropTypes.arrayOf
        arrayOf=array_of,
        # React's PropTypes.objectOf
        objectOf=lambda: (
            "dict with strings as keys and values of type "
            + js_to_py_type(type_object["value"])
        ),
        # React's PropTypes.shape
        shape=shape_or_exact,
        # React's PropTypes.exact
        exact=shape_or_exact,
        tuple=tuple_of,
    )
def map_js_to_py_types_flow_types(type_object):
    """Mapping from the Flow js types to the Python type.

    Returns a dict of callables keyed by the Flow type name. All of them
    take no arguments, except ``signature`` which takes the indent level
    (it recurses into nested prop docstrings).
    """
    return dict(
        array=lambda: "list",
        boolean=lambda: "boolean",
        number=lambda: "number",
        string=lambda: "string",
        Object=lambda: "dict",
        any=lambda: "bool | number | str | dict | list",
        Element=lambda: "dash component",
        Node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOfType
        union=lambda: " | ".join(
            js_to_py_type(subType)
            for subType in type_object["elements"]
            if js_to_py_type(subType) != ""
        ),
        # Flow's Array type
        Array=lambda: "list"
        + (
            f' of {js_to_py_type(type_object["elements"][0])}s'
            if js_to_py_type(type_object["elements"][0]) != ""
            else ""
        ),
        # React's PropTypes.shape
        signature=lambda indent_num: (
            "dict with keys:\n"
            + "\n".join(
                create_prop_docstring(
                    prop_name=prop["key"],
                    type_object=prop["value"],
                    required=prop["value"]["required"],
                    description=prop["value"].get("description", ""),
                    default=prop.get("defaultValue"),
                    indent_num=indent_num + 2,
                    is_flow_type=True,
                )
                for prop in type_object["signature"]["properties"]
            )
        ),
    )
def js_to_py_type(type_object, is_flow_type=False, indent_num=0):
    """Convert JS types to Python types for the component definition.

    Parameters
    ----------
    type_object: dict
        react-docgen-generated prop type dictionary
    is_flow_type: bool
        Does the prop use Flow types? Otherwise, uses PropTypes
    indent_num: int
        Number of indents to use for the docstring for the prop

    Returns
    -------
    str
        Python type string ("" for computed/function/unknown types)
    """
    js_type_name = type_object["name"]
    js_to_py_types = (
        map_js_to_py_types_flow_types(type_object=type_object)
        if is_flow_type
        else map_js_to_py_types_prop_types(
            type_object=type_object, indent_num=indent_num
        )
    )
    # Precedence makes this (computed AND truthy) OR type == "function":
    # both computed props and function props get no Python type string.
    if (
        "computed" in type_object
        and type_object["computed"]
        or type_object.get("type", "") == "function"
    ):
        return ""
    if js_type_name in js_to_py_types:
        if js_type_name == "signature":  # This is a Flow object w/ signature
            return js_to_py_types[js_type_name](indent_num)
        # All other types
        return js_to_py_types[js_type_name]()
    # Unknown type name: emit no annotation rather than guessing.
    return ""

View File

@@ -0,0 +1,429 @@
import abc
import collections
import inspect
import sys
import uuid
import random
from .._utils import patch_collections_abc, stringify_id, OrderedSet
# Resolve MutableSequence via the shared compatibility helper.
MutableSequence = patch_collections_abc("MutableSequence")
# Seeded RNG so auto-generated component IDs are reproducible across runs.
rd = random.Random(0)
# pylint: disable=no-init,too-few-public-methods
class ComponentRegistry:
    """Holds a registry of the namespaces used by components."""

    registry = OrderedSet()
    children_props = collections.defaultdict(dict)

    @classmethod
    def get_resources(cls, resource_name):
        """Gather the ``resource_name`` list from every registered module."""
        collected = []
        for registered_name in cls.registry:
            registered_module = sys.modules[registered_name]
            collected.extend(getattr(registered_module, resource_name, []))
        return collected
class ComponentMeta(abc.ABCMeta):
    """Metaclass for all generated components.

    On class creation it records the component's top-level package in
    ``ComponentRegistry.registry`` and its ``_children_props`` in
    ``ComponentRegistry.children_props``, keyed by namespace.
    """

    # pylint: disable=arguments-differ
    def __new__(mcs, name, bases, attributes):
        component = abc.ABCMeta.__new__(mcs, name, bases, attributes)
        # Top-level package the class was defined in, e.g. "dash_table".
        module = attributes["__module__"].split(".")[0]
        if name == "Component" or module == "builtins":
            # Don't do the base component
            # and the components loaded dynamically by load_component
            # as it doesn't have the namespace.
            return component
        ComponentRegistry.registry.add(module)
        ComponentRegistry.children_props[attributes.get("_namespace", module)][
            name
        ] = attributes.get("_children_props")
        return component
def is_number(s):
    """Return True if ``s`` can be parsed as a float, False otherwise.

    Also returns False for values ``float()`` rejects with a TypeError
    (e.g. None or a list) instead of propagating the exception, so the
    helper is safe on arbitrary prop values.
    """
    try:
        float(s)
        return True
    except (ValueError, TypeError):
        return False
def _check_if_has_indexable_children(item):
    """Raise KeyError unless ``item.children`` is a Component or a sequence."""
    if not hasattr(item, "children"):
        raise KeyError
    children = item.children
    if not isinstance(children, (Component, tuple, MutableSequence)):
        raise KeyError
class Component(metaclass=ComponentMeta):
    """Base class for all generated Dash components.

    Provides keyword-argument prop validation, serialization to the JSON
    structure the renderer expects (``to_plotly_json``), and a
    mapping-style interface for getting/setting/deleting components by ID
    within the tree of children.
    """

    # Prop names (besides _base_nodes) that may contain child components;
    # overridden by generated subclasses.
    _children_props = []
    # Props that are allowed to hold Component instances directly.
    _base_nodes = ["children"]

    class _UNDEFINED:
        def __repr__(self):
            return "undefined"

        def __str__(self):
            return "undefined"

    # Sentinel default: the prop was not supplied at all.
    UNDEFINED = _UNDEFINED()

    class _REQUIRED:
        def __repr__(self):
            return "required"

        def __str__(self):
            return "required"

    # Sentinel default: the prop is required but not yet supplied.
    REQUIRED = _REQUIRED()

    def __init__(self, **kwargs):
        """Validate each supplied prop and set it as an instance attribute.

        Raises TypeError for unknown props, for Component values in props
        other than the base nodes, and for invalid ``id`` values (must be
        a string, or a dict with string keys and str/number/bool values).
        """
        import dash  # pylint: disable=import-outside-toplevel, cyclic-import

        # pylint: disable=super-init-not-called
        for k, v in list(kwargs.items()):
            # pylint: disable=no-member
            k_in_propnames = k in self._prop_names
            k_in_wildcards = any(
                k.startswith(w) for w in self._valid_wildcard_attributes
            )
            # e.g. "The dash_core_components.Dropdown component (version 1.6.0)
            # with the ID "my-dropdown"
            id_suffix = f' with the ID "{kwargs["id"]}"' if "id" in kwargs else ""
            try:
                # Get fancy error strings that have the version numbers
                error_string_prefix = "The `{}.{}` component (version {}){}"
                # These components are part of dash now, so extract the dash version:
                dash_packages = {
                    "dash_html_components": "html",
                    "dash_core_components": "dcc",
                    "dash_table": "dash_table",
                }
                if self._namespace in dash_packages:
                    error_string_prefix = error_string_prefix.format(
                        dash_packages[self._namespace],
                        self._type,
                        dash.__version__,
                        id_suffix,
                    )
                else:
                    # Otherwise import the package and extract the version number
                    error_string_prefix = error_string_prefix.format(
                        self._namespace,
                        self._type,
                        getattr(__import__(self._namespace), "__version__", "unknown"),
                        id_suffix,
                    )
            except ImportError:
                # Our tests create mock components with libraries that
                # aren't importable
                error_string_prefix = f"The `{self._type}` component{id_suffix}"
            if not k_in_propnames and not k_in_wildcards:
                allowed_args = ", ".join(
                    sorted(self._prop_names)
                )  # pylint: disable=no-member
                raise TypeError(
                    f"{error_string_prefix} received an unexpected keyword argument: `{k}`"
                    f"\nAllowed arguments: {allowed_args}"
                )
            if k not in self._base_nodes and isinstance(v, Component):
                raise TypeError(
                    error_string_prefix
                    + " detected a Component for a prop other than `children`\n"
                    + f"Prop {k} has value {v!r}\n\n"
                    + "Did you forget to wrap multiple `children` in an array?\n"
                    + 'For example, it must be html.Div(["a", "b", "c"]) not html.Div("a", "b", "c")\n'
                )
            if k == "id":
                if isinstance(v, dict):
                    for id_key, id_val in v.items():
                        if not isinstance(id_key, str):
                            raise TypeError(
                                "dict id keys must be strings,\n"
                                + f"found {id_key!r} in id {v!r}"
                            )
                        if not isinstance(id_val, (str, int, float, bool)):
                            raise TypeError(
                                "dict id values must be strings, numbers or bools,\n"
                                + f"found {id_val!r} in id {v!r}"
                            )
                elif not isinstance(v, str):
                    raise TypeError(f"`id` prop must be a string or dict, not {v!r}")
            setattr(self, k, v)

    def _set_random_id(self):
        """Assign a deterministic auto-generated UUID as ``id`` if unset.

        Returns the existing or newly generated id. Raises RuntimeError in
        contexts where auto-generated IDs are unsafe (persistence enabled,
        or dash_snapshots loaded).
        """
        if hasattr(self, "id"):
            return getattr(self, "id")
        kind = f"`{self._namespace}.{self._type}`"  # pylint: disable=no-member
        if getattr(self, "persistence", False):
            raise RuntimeError(
                f"""
                Attempting to use an auto-generated ID with the `persistence` prop.
                This is prohibited because persistence is tied to component IDs and
                auto-generated IDs can easily change.
                Please assign an explicit ID to this {kind} component.
                """
            )
        if "dash_snapshots" in sys.modules:
            raise RuntimeError(
                f"""
                Attempting to use an auto-generated ID in an app with `dash_snapshots`.
                This is prohibited because snapshots saves the whole app layout,
                including component IDs, and auto-generated IDs can easily change.
                Callbacks referencing the new IDs will not work with old snapshots.
                Please assign an explicit ID to this {kind} component.
                """
            )
        # Drawn from the module-level seeded RNG, so ids are stable per run.
        # NOTE(review): randint's upper bound is inclusive; 2**128 itself
        # would overflow uuid.UUID (astronomically unlikely) — verify intent.
        v = str(uuid.UUID(int=rd.randint(0, 2**128)))
        setattr(self, "id", v)
        return v

    def to_plotly_json(self):
        """Serialize this component to the dict structure the renderer expects:
        {"props": ..., "type": ..., "namespace": ...}.
        """
        # Add normal properties
        props = {
            p: getattr(self, p)
            for p in self._prop_names  # pylint: disable=no-member
            if hasattr(self, p)
        }
        # Add the wildcard properties data-* and aria-*
        props.update(
            {
                k: getattr(self, k)
                for k in self.__dict__
                if any(
                    k.startswith(w)
                    # pylint:disable=no-member
                    for w in self._valid_wildcard_attributes
                )
            }
        )
        as_json = {
            "props": props,
            "type": self._type,  # pylint: disable=no-member
            "namespace": self._namespace,  # pylint: disable=no-member
        }
        return as_json

    # pylint: disable=too-many-branches, too-many-return-statements
    # pylint: disable=redefined-builtin, inconsistent-return-statements
    def _get_set_or_delete(self, id, operation, new_item=None):
        """Shared implementation behind __getitem__/__setitem__/__delitem__.

        Depth-first searches the children tree for the component with the
        given ``id`` and applies ``operation`` ("get", "set" or "delete").
        Raises KeyError if no component with that id exists in the subtree.
        """
        _check_if_has_indexable_children(self)
        # pylint: disable=access-member-before-definition,
        # pylint: disable=attribute-defined-outside-init
        if isinstance(self.children, Component):
            if getattr(self.children, "id", None) is not None:
                # Woohoo! It's the item that we're looking for
                if self.children.id == id:
                    if operation == "get":
                        return self.children
                    if operation == "set":
                        self.children = new_item
                        return
                    if operation == "delete":
                        self.children = None
                        return
            # Recursively dig into its subtree
            try:
                if operation == "get":
                    return self.children.__getitem__(id)
                if operation == "set":
                    self.children.__setitem__(id, new_item)
                    return
                if operation == "delete":
                    self.children.__delitem__(id)
                    return
            except KeyError:
                pass
        # if children is like a list
        if isinstance(self.children, (tuple, MutableSequence)):
            for i, item in enumerate(self.children):
                # If the item itself is the one we're looking for
                if getattr(item, "id", None) == id:
                    if operation == "get":
                        return item
                    if operation == "set":
                        self.children[i] = new_item
                        return
                    if operation == "delete":
                        del self.children[i]
                        return
                # Otherwise, recursively dig into that item's subtree
                # Make sure it's not like a string
                elif isinstance(item, Component):
                    try:
                        if operation == "get":
                            return item.__getitem__(id)
                        if operation == "set":
                            item.__setitem__(id, new_item)
                            return
                        if operation == "delete":
                            item.__delitem__(id)
                            return
                    except KeyError:
                        pass
        # The end of our branch
        # If we were in a list, then this exception will get caught
        raise KeyError(id)

    # Magic methods for a mapping interface:
    # - __getitem__
    # - __setitem__
    # - __delitem__
    # - __iter__
    # - __len__
    def __getitem__(self, id):  # pylint: disable=redefined-builtin
        """Recursively find the element with the given ID through the tree of
        children."""
        # A component's children can be undefined, a string, another component,
        # or a list of components.
        return self._get_set_or_delete(id, "get")

    def __setitem__(self, id, item):  # pylint: disable=redefined-builtin
        """Set an element by its ID."""
        return self._get_set_or_delete(id, "set", item)

    def __delitem__(self, id):  # pylint: disable=redefined-builtin
        """Delete items by ID in the tree of children."""
        return self._get_set_or_delete(id, "delete")

    def _traverse(self):
        """Yield each item in the tree."""
        for t in self._traverse_with_paths():
            yield t[1]

    @staticmethod
    def _id_str(component):
        # Returns " (id=...)" for labelled tree paths, or "" when no id.
        id_ = stringify_id(getattr(component, "id", ""))
        return id_ and f" (id={id_:s})"

    def _traverse_with_paths(self):
        """Yield each item with its path in the tree."""
        children = getattr(self, "children", None)
        children_type = type(children).__name__
        children_string = children_type + self._id_str(children)
        # children is just a component
        if isinstance(children, Component):
            yield "[*] " + children_string, children
            # pylint: disable=protected-access
            for p, t in children._traverse_with_paths():
                yield "\n".join(["[*] " + children_string, p]), t
        # children is a list of components
        elif isinstance(children, (tuple, MutableSequence)):
            for idx, i in enumerate(children):
                list_path = f"[{idx:d}] {type(i).__name__:s}{self._id_str(i)}"
                yield list_path, i
                if isinstance(i, Component):
                    # pylint: disable=protected-access
                    for p, t in i._traverse_with_paths():
                        yield "\n".join([list_path, p]), t

    def _traverse_ids(self):
        """Yield components with IDs in the tree of children."""
        for t in self._traverse():
            if isinstance(t, Component) and getattr(t, "id", None) is not None:
                yield t

    def __iter__(self):
        """Yield IDs in the tree of children."""
        for t in self._traverse_ids():
            yield t.id

    def __len__(self):
        """Return the number of items in the tree."""
        # TODO - Should we return the number of items that have IDs
        # or just the number of items?
        # The number of items is more intuitive but returning the number
        # of IDs matches __iter__ better.
        length = 0
        if getattr(self, "children", None) is None:
            length = 0
        elif isinstance(self.children, Component):
            length = 1
            length += len(self.children)
        elif isinstance(self.children, (tuple, MutableSequence)):
            for c in self.children:
                length += 1
                if isinstance(c, Component):
                    length += len(c)
        else:
            # string or number
            length = 1
        return length

    def __repr__(self):
        """Readable representation: component type plus set props (or just
        the children when nothing else is set)."""
        # pylint: disable=no-member
        props_with_values = [
            c for c in self._prop_names if getattr(self, c, None) is not None
        ] + [
            c
            for c in self.__dict__
            if any(c.startswith(wc_attr) for wc_attr in self._valid_wildcard_attributes)
        ]
        if any(p != "children" for p in props_with_values):
            props_string = ", ".join(
                f"{p}={getattr(self, p)!r}" for p in props_with_values
            )
        else:
            props_string = repr(getattr(self, "children", None))
        return f"{self._type}({props_string})"
def _explicitize_args(func):
    """Wrap ``func`` so it receives ``_explicit_args``: the list of
    argument names the caller supplied explicitly (positionally or by
    keyword), with ``self`` removed.

    The wrapper raises if a caller tries to pass ``_explicit_args``
    directly, and carries the wrapped function's signature so tooling
    shows the real component arguments.
    """
    # Python 2 is no longer supported (this module relies on f-strings),
    # so read the code object via the Python 3 attribute directly.
    varnames = func.__code__.co_varnames

    def wrapper(*args, **kwargs):
        if "_explicit_args" in kwargs:
            raise Exception("Variable _explicit_args should not be set.")
        # Positional args map onto the leading co_varnames slots.
        kwargs["_explicit_args"] = list(
            set(list(varnames[: len(args)]) + [k for k, _ in kwargs.items()])
        )
        if "self" in kwargs["_explicit_args"]:
            kwargs["_explicit_args"].remove("self")
        return func(*args, **kwargs)

    # Copy the wrapped function's signature onto the wrapper so generated
    # docs and IDE completion show the component's actual parameters.
    new_sig = inspect.signature(wrapper).replace(
        parameters=inspect.signature(func).parameters.values()
    )
    wrapper.__signature__ = new_sig
    return wrapper

View File

@@ -0,0 +1,186 @@
import os
import sys
import json
import string
import shutil
import logging
import coloredlogs
import fire
import requests
from .._utils import run_command_with_process, compute_md5, job
# Module-level logger; coloredlogs re-routes it with timestamped output.
logger = logging.getLogger(__name__)
coloredlogs.install(
    fmt="%(asctime)s,%(msecs)03d %(levelname)s - %(message)s", datefmt="%H:%M:%S"
)
class BuildProcess:
    """Drives the npm build for a Dash bundle package and collects assets.

    ``main`` is the package directory holding package.json; ``deps_info``
    is an iterable of ``(scope, name, subfolder, filename, extras)``
    tuples describing npm dependency bundles to copy into ``deps/``.
    """

    def __init__(self, main, deps_info):
        self.logger = logger
        self.main = main
        self.build_folder = self._concat(self.main, "build")
        self.deps_info = deps_info
        self.npm_modules = self._concat(self.main, "node_modules")
        self.package_lock = self._concat(self.main, "package-lock.json")
        self.package = self._concat(self.main, "package.json")
        self._parse_package(path=self.package)
        # Folders wiped by `clean` before a fresh build.
        self.asset_paths = (self.deps_folder, self.npm_modules)

    def _parse_package(self, path):
        """Load name, version and dependency pins from a package(-lock).json."""
        with open(path, "r", encoding="utf-8") as fp:
            package = json.load(fp)
        self.version = package["version"]
        self.name = package["name"]
        self.deps_folder = self._concat(self.main, os.pardir, "deps")
        self.deps = package["dependencies"]

    @staticmethod
    def _concat(*paths):
        """Join path fragments (skipping falsy ones) into a resolved path."""
        return os.path.realpath(os.path.sep.join((path for path in paths if path)))

    @staticmethod
    def _clean_path(path):
        """Remove a file or directory if it exists; exit(1) on OS errors."""
        if os.path.exists(path):
            logger.warning("🚨 %s already exists, remove it!", path)
            try:
                if os.path.isfile(path):
                    os.remove(path)
                if os.path.isdir(path):
                    shutil.rmtree(path)
            except OSError:
                sys.exit(1)
        else:
            logger.warning("🚨 %s doesn't exist, no action taken", path)

    @job("clean all the previous assets generated by build tool")
    def clean(self):
        for path in self.asset_paths:
            self._clean_path(path)

    @job("run `npm ci`")
    def npm(self):
        """Job to install npm packages."""
        os.chdir(self.main)
        run_command_with_process("npm ci")

    @job("build the renderer in dev mode")
    def watch(self):
        os.chdir(self.main)
        os.system("npm run build:dev")

    @job("run the whole building process in sequence")
    def build(self, build=None):
        self.clean()
        self.npm()
        self.bundles(build)
        self.digest()

    @job("compute the hash digest for assets")
    def digest(self):
        if not os.path.exists(self.deps_folder):
            try:
                os.makedirs(self.deps_folder)
            except OSError:
                logger.exception("🚨 having issues manipulating %s", self.deps_folder)
                sys.exit(1)
        # digest.json records the package version plus an MD5 per bundle,
        # covering both the copied deps and the freshly built assets.
        payload = {self.name: self.version}
        for folder in (self.deps_folder, self.build_folder):
            copies = tuple(
                _
                for _ in os.listdir(folder)
                if os.path.splitext(_)[-1] in {".js", ".map"}
            )
            logger.info("bundles in %s %s", folder, copies)
            for copy in copies:
                payload[f"MD5 ({copy})"] = compute_md5(self._concat(folder, copy))
        with open(self._concat(self.main, "digest.json"), "w", encoding="utf-8") as fp:
            json.dump(payload, fp, sort_keys=True, indent=4, separators=(",", ":"))
        logger.info(
            "bundle digest in digest.json:\n%s",
            json.dumps(payload, sort_keys=True, indent=4),
        )

    @job("copy and generate the bundles")
    def bundles(self, build=None):  # pylint:disable=too-many-locals
        if not os.path.exists(self.deps_folder):
            try:
                os.makedirs(self.deps_folder)
            except OSError:
                logger.exception("🚨 having issues manipulating %s", self.deps_folder)
                sys.exit(1)
        # Re-parse from the lock file so the exact pinned versions are copied.
        self._parse_package(self.package_lock)
        getattr(self, "_bundles_extra", lambda: None)()
        versions = {
            "version": self.version,
            "package": self.name.replace(" ", "_").replace("-", "_"),
        }
        for scope, name, subfolder, filename, extras in self.deps_info:
            version = self.deps["/".join(filter(None, [scope, name]))]["version"]
            name_squashed = name.replace("-", "").replace(".", "")
            versions[name_squashed] = version
            logger.info("copy npm dependency => %s", filename)
            ext = "min.js" if "min" in filename.split(".") else "js"
            target = f"{name}@{version}.{ext}"
            shutil.copyfile(
                self._concat(self.npm_modules, scope, name, subfolder, filename),
                self._concat(self.deps_folder, target),
            )
            if extras:
                extras_str = '", "'.join(extras)
                versions[f"extra_{name_squashed}_versions"] = f'"{extras_str}"'
                for extra_version in extras:
                    # BUGFIX: the URL previously ended in the literal text
                    # "(unknown)" (a mangled placeholder) and could never
                    # resolve; download the actual dependency bundle file.
                    url = f"https://unpkg.com/{name}@{extra_version}/umd/{filename}"
                    res = requests.get(url)
                    extra_target = f"{name}@{extra_version}.{ext}"
                    extra_path = self._concat(self.deps_folder, extra_target)
                    with open(extra_path, "wb") as fp:
                        fp.write(res.content)
        _script = "build:dev" if build == "local" else "build:js"
        logger.info("run `npm run %s`", _script)
        os.chdir(self.main)
        run_command_with_process(f"npm run {_script}")
        logger.info("generate the `__init__.py` from template and versions")
        with open(self._concat(self.main, "init.template"), encoding="utf-8") as fp:
            t = string.Template(fp.read())
        renderer_init = self._concat(self.deps_folder, os.pardir, "_dash_renderer.py")
        with open(renderer_init, "w", encoding="utf-8") as fp:
            fp.write(t.safe_substitute(versions))
class Renderer(BuildProcess):
    """Build process preconfigured for the dash-renderer package."""

    def __init__(self):
        """dash-renderer's path is binding with the dash folder hierarchy."""
        extras = ["18.2.0"]  # versions to include beyond what's in package.json
        super().__init__(
            self._concat(os.path.dirname(__file__), os.pardir, "dash-renderer"),
            # (scope, name, subfolder, filename, extras) per dependency bundle.
            (
                ("@babel", "polyfill", "dist", "polyfill.min.js", None),
                (None, "react", "umd", "react.production.min.js", extras),
                (None, "react", "umd", "react.development.js", extras),
                (None, "react-dom", "umd", "react-dom.production.min.js", extras),
                (None, "react-dom", "umd", "react-dom.development.js", extras),
                (None, "prop-types", None, "prop-types.min.js", None),
                (None, "prop-types", None, "prop-types.js", None),
            ),
        )
def renderer():
    """Console-script entry point: expose the Renderer jobs as a Fire CLI."""
    fire.Fire(Renderer)

View File

@@ -0,0 +1,273 @@
from collections import OrderedDict
import json
import sys
import subprocess
import shlex
import os
import argparse
import shutil
import functools
import pkg_resources
import yaml
from ._r_components_generation import write_class_file
from ._r_components_generation import generate_exports
from ._py_components_generation import generate_class_file
from ._py_components_generation import generate_imports
from ._py_components_generation import generate_classes_files
from ._jl_components_generation import generate_struct_file
from ._jl_components_generation import generate_module
# Prop-name patterns that would clash with Component internals; they are
# passed to extract-meta.js, which rejects any component prop matching one.
reserved_words = [
    "UNDEFINED",
    "REQUIRED",
    "to_plotly_json",
    "available_properties",
    "available_wildcard_properties",
    "_.*",
]
class _CombinedFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
    """Argparse formatter that shows defaults and keeps raw description text."""
    pass
# pylint: disable=too-many-locals, too-many-arguments, too-many-branches, too-many-statements
def generate_components(
    components_source,
    project_shortname,
    package_info_filename="package.json",
    ignore="^_",
    rprefix=None,
    rdepends="",
    rimports="",
    rsuggests="",
    jlprefix=None,
    metadata=None,
    keep_prop_order=None,
    max_props=None,
):
    """Extract component metadata via extract-meta.js (react-docgen) and
    generate Python — and optionally R and Julia — component classes.

    If ``metadata`` is supplied the extraction subprocess is skipped.
    Writes generated files under ``project_shortname`` and exits the
    process if metadata extraction fails.
    """
    project_shortname = project_shortname.replace("-", "_").rstrip("/\\")
    is_windows = sys.platform == "win32"
    extract_path = pkg_resources.resource_filename("dash", "extract-meta.js")
    # Props matching any reserved word are rejected by the extractor.
    reserved_patterns = "|".join(f"^{p}$" for p in reserved_words)
    os.environ["NODE_PATH"] = "node_modules"
    shutil.copyfile(
        "package.json", os.path.join(project_shortname, package_info_filename)
    )
    if not metadata:
        env = os.environ.copy()
        # Ensure local node modules is used when the script is packaged.
        env["MODULES_PATH"] = os.path.abspath("./node_modules")
        cmd = shlex.split(
            f'node {extract_path} "{ignore}" "{reserved_patterns}" {components_source}',
            posix=not is_windows,
        )
        proc = subprocess.Popen(  # pylint: disable=consider-using-with
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=is_windows,
            env=env,
        )
        out, err = proc.communicate()
        status = proc.poll()
        if err:
            print(err.decode(), file=sys.stderr)
        if not out:
            print(
                f"Error generating metadata in {project_shortname} (status={status})",
                file=sys.stderr,
            )
            sys.exit(1)
        metadata = safe_json_loads(out.decode("utf-8"))
    py_generator_kwargs = {}
    if keep_prop_order is not None:
        keep_prop_order = [
            component.strip(" ") for component in keep_prop_order.split(",")
        ]
        py_generator_kwargs["prop_reorder_exceptions"] = keep_prop_order
    if max_props:
        py_generator_kwargs["max_props"] = max_props
    # Python generation always runs; R/Julia generators are added on demand.
    generator_methods = [functools.partial(generate_class_file, **py_generator_kwargs)]
    if rprefix is not None or jlprefix is not None:
        with open("package.json", "r", encoding="utf-8") as f:
            pkg_data = safe_json_loads(f.read())
    if rprefix is not None:
        if not os.path.exists("man"):
            os.makedirs("man")
        if not os.path.exists("R"):
            os.makedirs("R")
        if os.path.isfile("dash-info.yaml"):
            with open("dash-info.yaml", encoding="utf-8") as yamldata:
                rpkg_data = yaml.safe_load(yamldata)
        else:
            rpkg_data = None
        generator_methods.append(
            functools.partial(write_class_file, prefix=rprefix, rpkg_data=rpkg_data)
        )
    if jlprefix is not None:
        generator_methods.append(
            functools.partial(generate_struct_file, prefix=jlprefix)
        )
    components = generate_classes_files(project_shortname, metadata, *generator_methods)
    # Persist the extracted metadata next to the generated classes.
    with open(
        os.path.join(project_shortname, "metadata.json"), "w", encoding="utf-8"
    ) as f:
        json.dump(metadata, f, indent=2)
    generate_imports(project_shortname, components)
    if rprefix is not None:
        generate_exports(
            project_shortname,
            components,
            metadata,
            pkg_data,
            rpkg_data,
            rprefix,
            rdepends,
            rimports,
            rsuggests,
        )
    if jlprefix is not None:
        generate_module(project_shortname, components, metadata, pkg_data, jlprefix)
def safe_json_loads(s):
    """Parse a JSON string, preserving object key order.

    Returns OrderedDict-based structures so generated classes and
    metadata files keep the key order emitted by react-docgen.
    (The former Python 2 ``byteify`` pass is dead code on Python 3
    and has been dropped.)
    """
    return json.loads(s, object_pairs_hook=OrderedDict)
def component_build_arg_parser():
    """Build the argument parser for the `dash-generate-components` CLI."""
    parser = argparse.ArgumentParser(
        prog="dash-generate-components",
        formatter_class=_CombinedFormatter,
        description="Generate dash components by extracting the metadata "
        "using react-docgen. Then map the metadata to Python classes.",
    )
    parser.add_argument("components_source", help="React components source directory.")
    parser.add_argument(
        "project_shortname", help="Name of the project to export the classes files."
    )
    parser.add_argument(
        "-p",
        "--package-info-filename",
        default="package.json",
        help="The filename of the copied `package.json` to `project_shortname`",
    )
    parser.add_argument(
        "-i",
        "--ignore",
        default="^_",
        help="Files/directories matching the pattern will be ignored",
    )
    parser.add_argument(
        "--r-prefix",
        help="Specify a prefix for Dash for R component names, write "
        "components to R dir, create R package.",
    )
    parser.add_argument(
        "--r-depends",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Depends field of the DESCRIPTION file.",
    )
    parser.add_argument(
        "--r-imports",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Imports field of the DESCRIPTION file.",
    )
    parser.add_argument(
        "--r-suggests",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Suggests field of the DESCRIPTION file.",
    )
    parser.add_argument(
        "--jl-prefix",
        help="Specify a prefix for Dash for R component names, write "
        "components to R dir, create R package.",
    )
    parser.add_argument(
        "-k",
        "--keep-prop-order",
        default=None,
        help="Specify a comma-separated list of components which will use the prop "
        "order described in the component proptypes instead of alphabetically reordered "
        "props. Pass the 'ALL' keyword to have every component retain "
        "its original prop order.",
    )
    parser.add_argument(
        "--max-props",
        type=int,
        default=250,
        help="Specify the max number of props to list in the component signature. "
        "More props will still be shown in the docstring, and will still work when "
        "provided as kwargs to the component. Python <3.7 only supports 255 args, "
        "but you may also want to reduce further for improved readability at the "
        "expense of auto-completion for the later props. Use 0 to include all props.",
    )
    return parser
def cli():
    """Entry point for `dash-generate-components`: parse the CLI arguments
    and run the component generation pipeline."""
    args = component_build_arg_parser().parse_args()
    generate_components(
        args.components_source,
        args.project_shortname,
        package_info_filename=args.package_info_filename,
        ignore=args.ignore,
        rprefix=args.r_prefix,
        rdepends=args.r_depends,
        rimports=args.r_imports,
        rsuggests=args.r_suggests,
        jlprefix=args.jl_prefix,
        keep_prop_order=args.keep_prop_order,
        max_props=args.max_props,
    )
# pylint: disable=undefined-variable
def byteify(input_object):
    """Recursively encode unicode strings to UTF-8 bytes (Python 2 only).

    NOTE(review): legacy helper — `unicode` and dict.iteritems exist only
    on Python 2; calling this under Python 3 raises NameError. It is kept
    for the Python 2 branch of safe_json_loads.
    """
    if isinstance(input_object, dict):
        return OrderedDict(
            [(byteify(key), byteify(value)) for key, value in input_object.iteritems()]
        )
    if isinstance(input_object, list):
        return [byteify(element) for element in input_object]
    if isinstance(input_object, unicode):  # noqa:F821
        return input_object.encode("utf-8")
    return input_object
# Allow running this module directly as a script.
if __name__ == "__main__":
    cli()

View File

@@ -0,0 +1,88 @@
import collections
import json
import os
from ._py_components_generation import (
generate_class_file,
generate_imports,
generate_classes_files,
generate_class,
)
from .base_component import ComponentRegistry
def _get_metadata(metadata_path):
    """Read a react-docgen metadata JSON file, preserving key order.

    Returns the parsed structure with dict keys in file order, so
    component generation output is deterministic.
    """
    with open(metadata_path, encoding="utf-8") as data_file:
        # json.load streams straight from the file object; no need to read
        # the whole text and drive a JSONDecoder by hand.
        return json.load(data_file, object_pairs_hook=collections.OrderedDict)
def load_components(metadata_path, namespace="default_namespace"):
    """Load React component metadata into a format Dash can parse.

    Usage: load_components('../../component-suites/lib/metadata.json')

    Keyword arguments:
    metadata_path -- a path to a JSON file created by
    [`react-docgen`](https://github.com/reactjs/react-docgen).

    Returns:
    components -- a list of component objects with keys
    `type`, `valid_kwargs`, and `setup`.
    """
    # Register the component lib for index include.
    ComponentRegistry.registry.add(namespace)
    metadata = _get_metadata(metadata_path)
    # Each key is the path to a component source file, e.g.
    # src/components/MyControl.react.js; its basename (minus extensions)
    # becomes the component class name.
    # TODO Make more robust - some folks will write .jsx and others
    # will be on windows. Unfortunately react-docgen doesn't include
    # the name of the component atm.
    return [
        generate_class(
            component_path.split("/")[-1].split(".")[0],
            component_data["props"],
            component_data["description"],
            namespace,
            None,
        )
        for component_path, component_data in metadata.items()
    ]
def generate_classes(namespace, metadata_path="lib/metadata.json"):
    """Load React component metadata into a format Dash can parse, then create
    Python class files.

    Usage: generate_classes()

    Keyword arguments:
    namespace -- name of the generated Python package (also output dir)
    metadata_path -- a path to a JSON file created by
    [`react-docgen`](https://github.com/reactjs/react-docgen).
    """
    metadata = _get_metadata(metadata_path)
    # The imports file is written with append mode downstream, so any
    # stale copy from a previous run must be removed first.
    stale_imports = os.path.join(namespace, "_imports_.py")
    if os.path.exists(stale_imports):
        os.remove(stale_imports)
    written_components = generate_classes_files(namespace, metadata, generate_class_file)
    # Emit `_imports_.py` with an __all__ so `import *` works on the package.
    generate_imports(namespace, written_components)

View File

@@ -0,0 +1,162 @@
import sys
import subprocess
import shlex
import os
import argparse
import shutil
import logging
import coloredlogs
class _CombinedFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
    """Argparse formatter that shows defaults and keeps raw description text."""
    pass
# Module-level logger; coloredlogs re-routes it with timestamped output.
logger = logging.getLogger(__name__)
coloredlogs.install(
    fmt="%(asctime)s,%(msecs)03d %(levelname)s - %(message)s", datefmt="%H:%M:%S"
)
def bootstrap_components(components_source, concurrency, install_type):
    """Install npm dependencies for the selected component packages.

    ``install_type`` is the npm subcommand: "ci" (clean install) or "i".
    Prints progress to stderr and exits the process if npm fails.
    """
    is_windows = sys.platform == "win32"
    # "all" expands to the three built-in component packages.
    source_glob = (
        components_source
        if components_source != "all"
        else "dash-core-components|dash-html-components|dash-table"
    )
    cmdstr = f"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm {install_type}"
    cmd = shlex.split(cmdstr, posix=not is_windows)
    print(cmdstr)
    with subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows
    ) as proc:
        out, err = proc.communicate()
        status = proc.poll()
    if err:
        print(("🛑 " if status else "") + err.decode(), file=sys.stderr)
    if status or not out:
        print(
            f"🚨 Failed installing npm dependencies for component packages: {source_glob} (status={status}) 🚨",
            file=sys.stderr,
        )
        sys.exit(1)
    else:
        print(
            f"🟢 Finished installing npm dependencies for component packages: {source_glob} 🟢",
            file=sys.stderr,
        )
def build_components(components_source, concurrency):
    """Run `npm run build` for the selected packages and move the build
    artifacts into the corresponding directories under `dash/`.

    Exits the process if the build fails or artifacts are missing.
    """
    is_windows = sys.platform == "win32"
    # "all" expands to the three built-in component packages.
    source_glob = (
        components_source
        if components_source != "all"
        else "dash-core-components|dash-html-components|dash-table"
    )
    cmdstr = f"npx lerna exec --concurrency {concurrency} --scope *@({source_glob})* -- npm run build"
    cmd = shlex.split(cmdstr, posix=not is_windows)
    print(cmdstr)
    with subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows
    ) as proc:
        out, err = proc.communicate()
        status = proc.poll()
    if err:
        print(("🛑 " if status else "") + err.decode(), file=sys.stderr)
    if status or not out:
        print(
            f"🚨 Finished updating component packages: {source_glob} (status={status}) 🚨",
            file=sys.stderr,
        )
        sys.exit(1)
    for package in source_glob.split("|"):
        build_directory = os.path.join(
            "components", package, package.replace("-", "_").rstrip("/\\")
        )
        # Map each source package to its destination folder inside dash/.
        dest_dir = (
            "dcc"
            if package == "dash-core-components"
            else "html"
            if package == "dash-html-components"
            else "dash_table"
        )
        dest_path = os.path.join("dash", dest_dir)
        if not os.path.exists(dest_path):
            try:
                os.makedirs(dest_path)
            except OSError:
                logger.exception("🚨 Having issues manipulating %s", dest_path)
                sys.exit(1)
        if not os.path.exists(build_directory):
            print(
                "🚨 Could not locate build artifacts."
                + " Check that the npm build process completed"
                + f" successfully for package: {package} 🚨"
            )
            sys.exit(1)
        else:
            print(f"🚚 Moving build artifacts from {build_directory} to Dash 🚚")
            # Replace the destination wholesale with the fresh build output,
            # then restore .gitkeep so the directory stays tracked.
            shutil.rmtree(dest_path)
            shutil.copytree(build_directory, dest_path)
            with open(os.path.join(dest_path, ".gitkeep"), "w", encoding="utf-8"):
                pass
            print(f"🟢 Finished moving build artifacts from {build_directory} to Dash 🟢")
def cli():
    """Entry point for `dash-update-components`: parse args, install npm
    dependencies, then build and copy the component packages."""
    parser = argparse.ArgumentParser(
        prog="dash-update-components",
        formatter_class=_CombinedFormatter,
        description="Update the specified subcomponent libraries within Dash"
        " by copying over build artifacts, dependencies, and dependency metadata.",
    )
    parser.add_argument(
        "components_source",
        help="A glob string that matches the Dash component libraries to be updated"
        " (eg.'dash-table' // 'dash-core-components|dash-html-components' // 'all')."
        " The default argument is 'all'.",
        default="all",
    )
    parser.add_argument(
        "--concurrency",
        type=int,
        default=3,
        help="Maximum concurrent steps, up to 3 (ie all components in parallel)",
    )
    parser.add_argument(
        "--ci",
        help="For clean-install use '--ci True'",
        default="False",
    )
    args = parser.parse_args()
    # "ci" = npm clean-install, "i" = regular install.
    bootstrap_components(
        args.components_source, args.concurrency, "ci" if args.ci == "True" else "i"
    )
    build_components(args.components_source, args.concurrency)
# Allow running this module directly as a script.
if __name__ == "__main__":
    cli()