diff --git a/.gitignore b/.gitignore index 327b954..139a518 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ venv .env __pycache__ runtime +poetry.lock \ No newline at end of file diff --git a/README.md b/README.md index 16522da..adf06b9 100644 --- a/README.md +++ b/README.md @@ -9,66 +9,39 @@ Check the following example on how this parser will translate a Discord message: ![image](https://user-images.githubusercontent.com/1405498/131235730-94ba8100-2b42-492f-9479-bbce80c592f0.png) ```python -( - {'node_type': 'ITALIC', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'italic star single'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n'}, - - {'node_type': 'ITALIC', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'italic underscore single'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n'}, - - {'node_type': 'BOLD', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'bold single'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n'}, - - {'node_type': 'UNDERLINE', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'underline single'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n'}, - - {'node_type': 'STRIKETHROUGH', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'strikethrough single'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n\n'}, - - {'node_type': 'QUOTE_BLOCK', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'quote\nblock\n'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n'}, - - {'node_type': 'CODE_INLINE', - 'children': ( - {'node_type': 'TEXT', 'text_content': 'inline code'}, - )}, - - {'node_type': 'TEXT', 'text_content': '\n\n'}, - - {'node_type': 'QUOTE_BLOCK', - 'children': ( - {'node_type': 'CODE_BLOCK', - 'code_lang': 'python', - 'children': ( - {'node_type': 'TEXT', - 'text_content': 'code\nblock\nwith\npython\nhighlighting\n'},), - }, - )}, -) +[ + {'node_type': 'ITALIC', 'content': 'italic star single', 'children': [ + {'node_type': 'TEXT', 'content': 'italic star single', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n', 'children': []}, + {'node_type': 'ITALIC', 'content': 'italic underscore single', 'children': [ + {'node_type': 'TEXT', 'content': 'italic underscore single', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n', 'children': []}, + {'node_type': 'BOLD', 'content': 'bold single', 'children': [ + {'node_type': 'TEXT', 'content': 'bold single', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n', 'children': []}, + {'node_type': 'UNDERLINE', 'content': 'underline single', 'children': [ + {'node_type': 'TEXT', 'content': 'underline single', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n', 'children': []}, + {'node_type': 'STRIKETHROUGH', 'content': 'strikethrough single', 'children': [ + {'node_type': 'TEXT', 'content': 'strikethrough single', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n\n', 'children': []}, + {'node_type': 'QUOTE_BLOCK', 'content': 'quote\nblock\n', 'children': [ + {'node_type': 'TEXT', 'content': 'quote\nblock\n', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n', 'children': []}, + {'node_type': 'CODE_INLINE', 'content': 'inline code', 'children': [ + {'node_type': 'TEXT', 'content': 'inline code', 'children': []} + ]}, + {'node_type': 'TEXT', 'content': '\n\n', 'children': []}, + {'node_type': 'QUOTE_BLOCK', 'content': '```py\ncode\nblock\nwith\npython\nhighlighting\n```', 'children': [ + {'node_type': 'CODE_BLOCK', 'content': 'code\nblock\nwith\npython\nhighlighting\n', 'code_lang': 
'py', 'children': []}
+  ]}
+]
```
### Installation
@@ -91,49 +64,81 @@ ast_tuple_of_nodes = parse(message_content)

These are the types of nodes the parser will output:
```
TEXT
-- fields: "text_content"
+- fields: "content"
- Just standard text, no additional formatting
- No child nodes

ITALIC, BOLD, UNDERLINE, STRIKETHROUGH, SPOILER, CODE_INLINE
-- fields: "children"
+- fields: "children", "content"
- self-explanatory

QUOTE_BLOCK
-- fields: "children"
+- fields: "children", "content"
- represents a single, uninterrupted quote block (no gaps in Discord's client)
- cannot contain another quote block (Discord has no nested quotes)

CODE_BLOCK
-- fields: "children", "code_lang"
-- can only contain a single TEXT node, all other markdown syntax inside the code block
-  is ignored
+- fields: "code_lang", "content"
- may or may not have a language specifier
- first newline is stripped according to the same rules that the Discord client uses

USER, ROLE, CHANNEL
-- fields: "discord_id"
+- fields: "id"
- user, role, or channel mention
- there is no way to retrieve the user/role/channel name, color or channel type
  (text/voice/stage) from just the message, so you'll have to use the API
  (or discord.py) to query that

-URL_WITH_PREVIEW, URL_WITHOUT_PREVIEW
-- fields: "url"
+URL_WITH_PREVIEW, URL_WITHOUT_PREVIEW, URL_WITH_PREVIEW_EMBEDDED, URL_WITHOUT_PREVIEW_EMBEDDED
+- fields: "url", "content"
- an HTTP URL
- this is only recognized if the link actually contains "http". this is the same for
  the Discord client, with the exception that the Discord client also scans for
  invite links that don't start with http, e.g., "discord.gg/pxa"
- the WITHOUT_PREVIEW variant appears when the message contains the URL in the
  <URL> form, which causes the Discord client to suppress the preview
+- content is provided for the URL_WITH_PREVIEW_EMBEDDED and URL_WITHOUT_PREVIEW_EMBEDDED variants

-EMOJI_CUSTOM
-- fields: "emoji_name", "emoji_id"
-- you can get the custom emoji's image by querying to
+EMOJI_CUSTOM, EMOJI_CUSTOM_ANIMATED
+- fields: "content", "id", "url"
+- URLs are returned in the following way
  https://cdn.discordapp.com/emojis/EMOJI_ID.png
+  https://cdn.discordapp.com/emojis/EMOJI_ID.gif
+
+EMOJI_UNICODE
+- fields: "content", "url"
+- unicode emoji, e.g., 🚗
+- URLs are returned in the following way
+  https://emoji.fileformat.info/png/1f697.png
+
EMOJI_UNICODE_ENCODED
-- fields: "emoji_name"
+- fields: "content"
+- unicode emojis that are encoded using the Discord client's emoji encoding method
+- this will appear very rarely.
+  unicode emojis are usually just posted as unicode
+  characters and thus end up in a TEXT node
+
+EMOJI_CUSTOM_ENCODED, EMOJI_CUSTOM_ANIMATED_ENCODED
+- fields: "content", "id"
+- custom emojis that are encoded using the Discord client's emoji encoding method
+- you can get the custom emoji's image by querying
+  https://cdn.discordapp.com/emojis/EMOJI_ID.png
+
+EMOJI_CUSTOM_NAME, EMOJI_CUSTOM_ANIMATED_NAME
+- fields: "content", "name"
+- custom emojis that are posted using their name, e.g., :red_car:
+- you can get the custom emoji's image by querying
+  https://cdn.discordapp.com/emojis/EMOJI_ID.png
+
+EMOJI_CUSTOM_NAME_ENCODED, EMOJI_CUSTOM_ANIMATED_NAME_ENCODED
+- fields: "content", "name"
+- custom emojis that are posted using their name and encoded using the Discord client's
+  emoji encoding method, e.g., <:red_car:123456789123456789>
+- you can get the custom emoji's image by querying
+  https://cdn.discordapp.com/emojis/EMOJI_ID.png
+
+EMOJI_UNICODE_ENCODED
+- fields: "content"
- this will appear very rarely. unicode emojis are usually just posted as unicode
  characters and thus end up in a TEXT node
  it is, however, possible to send a message from a bot that uses, e.g., :red_car:
  instead of the actual red_car unicode emoji.
@@ -149,9 +154,6 @@ with how it's rendered in the Discord client:
- `***bold and italic***` will be detected as bold-only with extra stars.
  This only happens when the italic and bold stars are right next to each other.
  This does not happen when mixing bold stars with italic underscores.
-- `*italic with whitespace before star closer *`
-  will be detected as italic even though the Discord client won't.
-  Note that Discord doesn't have this weird requirement for `_underscore italic_`.
- ````
  ||spoilers around
  ```
@@ -162,4 +164,7 @@ with how it's rendered in the Discord client:
  will be detected as spoilers spanning the code segments, although the Discord
  client will only show spoiler bars before and after the code segment, but not
  on top of it.
-
+- Custom parsers are experimental; they tend to work for simple pairs of delimiter values.
+- The URL matching scheme of Discord is quite complex and not fully understood, so there
+  might be some edge cases where the parser doesn't recognize a URL that the Discord
+  client does, and vice versa.
diff --git a/discord_markdown_ast_parser/__init__.py b/discord_markdown_ast_parser/__init__.py
index 2cf9719..1c7f83c 100644
--- a/discord_markdown_ast_parser/__init__.py
+++ b/discord_markdown_ast_parser/__init__.py
@@ -1,23 +1,30 @@
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Union

-from discord_markdown_ast_parser.lexer import lex
-from discord_markdown_ast_parser.parser import Node, parse_tokens
+from .lexer import lex, Lexing
+from .parser import Node, parse_tokens


-def parse(text) -> List[Node]:
+def lexing_list_convert(lexing: Union[List[Lexing], Lexing]) -> List[Lexing]:
+    if not isinstance(lexing, list):
+        lexing = [lexing]
+    return [Lexing(item) if isinstance(item, str) else item for item in lexing]
+
+
+def parse(text, custom: Dict[str, List[Lexing]] = None) -> List[Node]:
    """
    Parses the text and returns an AST, using this package's internal Node
    representation.
    See parse_to_dict for a more generic string representation.
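    A rough usage sketch (illustrative only; the "HIGHLIGHT" key and the "=="
    delimiter below are made-up values for this example, not names defined by
    this package):

        from discord_markdown_ast_parser import parse
        from discord_markdown_ast_parser.lexer import Lexing

        nodes = parse("**bold** and <@123456789012345678>")
        # extra lexing rules can be passed via `custom`, keyed by the node type
        # name that the resulting nodes should carry
        nodes = parse("==marked==", custom={"HIGHLIGHT": [Lexing(r"==")]})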
""" - tokens = list(lex(text)) - return parse_tokens(tokens) + custom = custom if custom is not None else {} + custom = {k: lexing_list_convert(v) for k, v in custom.items()} + tokens = list(lex(text, custom)) + return parse_tokens(tokens, custom) -def parse_to_dict(text) -> List[Dict[str, Any]]: +def parse_to_dict(text, custom: Dict[str, List[Lexing]] = None) -> List[Dict[str, Any]]: """ Parses the text and returns an AST, represented as a dict. See the README for information on the structure of this dict. """ - node_ast = parse(text) - return [node.to_dict() for node in node_ast] + return [node.to_dict() for node in parse(text, custom)] diff --git a/discord_markdown_ast_parser/lexer.py b/discord_markdown_ast_parser/lexer.py index 17aa95c..2886309 100644 --- a/discord_markdown_ast_parser/lexer.py +++ b/discord_markdown_ast_parser/lexer.py @@ -1,76 +1,75 @@ import re -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum -from typing import Optional, List, Generator - - -class TokenType(Enum): - TEXT_INLINE = 1 - NEWLINE = 2 - STAR = 3 - UNDERSCORE = 4 - TILDE = 5 - SPOILER_DELIMITER = 6 - USER_MENTION = 7 - ROLE_MENTION = 8 - CHANNEL_MENTION = 9 - EMOJI_CUSTOM = 10 - EMOJI_UNICODE_ENCODED = 11 - URL_WITH_PREVIEW = 12 - URL_WITHOUT_PREVIEW = 13 - QUOTE_LINE_PREFIX = 14 - CODE_INLINE_DELIMITER = 15 - CODE_BLOCK_DELIMITER = 16 +from typing import Optional, List, Generator, Dict +import itertools -@dataclass -class Token: - token_type: TokenType - value: str - groups: Optional[List[str]] = None +class Lexing: + def __init__(self, pattern: Optional[str] = None, flags: re.RegexFlag = re.NOFLAG): + self.regex = re.compile(pattern, flags=flags) if pattern else None + + def __call__(self, text: str): + return self.regex and self.regex.match(text) + + def __repr__(self): + return f"{self.__class__.__name__}({self.regex and self.regex.pattern!r})" + + +URL_REGEX = r"https?:\/\/(www\.)?[-\w@:%.\+~#=]{1,256}\.[a-z]{2,63}\b([-\w@:%.\+~#=?&//]*)" + + +class LexingRule(Lexing, Enum): + USER_MENTION = r"<@!?(\d{15,20})>" + ROLE_MENTION = r"<@&(\d{15,20})>" + SLASH_COMMAND_MENTION = r"" + CHANNEL_MENTION = r"<#(\d{15,20})>" + TIMESTAMP = r"" + EMOJI_CUSTOM = r"<:([\w]{2,}):(\d{15,20})>" + EMOJI_CUSTOM_ANIMATED = r"" + EMOJI_UNICODE = r"(\u00a9|\u00ae|[\u2000-\u3300]|\ud83c[\ud000-\udfff]|\ud83d[\ud000-\udfff]|\ud83e[\ud000-\udfff])" + EMOJI_UNICODE_ENCODED = r":([\w]+):" + URL_WITHOUT_PREVIEW_EMBEDDED = fr"\[([^\]]+)\]\(<({URL_REGEX})>\)" + URL_WITH_PREVIEW_EMBEDDED = fr"\[([^\]]+)\]\(({URL_REGEX})\)" + URL_WITHOUT_PREVIEW = fr"<{URL_REGEX}>" + URL_WITH_PREVIEW = URL_REGEX + QUOTE_LINE_PREFIX = r"(>>)?> " + TILDE = r"~" + STAR = r"\*" + UNDERSCORE = r"_" + SPOILER_DELIMITER = r"\|\|" + CODE_BLOCK_DELIMITER = r"```" + CODE_INLINE_DELIMITER = r"`" + NEWLINE = r"\n" + TEXT_INLINE = "" @dataclass -class LexingRule: - token_type: TokenType - pattern: Optional[str] = None +class Token: + value: str = "" + lexing_rule: Lexing = LexingRule.TEXT_INLINE + groups: List[str] = field(default_factory=list) + def __contains__(self, rule: Lexing): + return self.lexing_rule == rule -def lex(input_text: str) -> Generator[Token, None, None]: - """ - Scans the input text for sequences of characters (=tokens), identified by regular - expressions, that have a special meaning in Discord's Markdown. - This function takes care of identifying the low-level elements of the text such as - markdown special characters. 
It also does pretty much all of the parsing work for - simple structures such as user mentions that can be identified via regular - expressions. +def lex(input_text: str, custom: Optional[Dict[str, List[Lexing]]] = None) -> Generator[Token, None, None]: + """Lexes the input text and returns a generator of tokens. + The generator will yield a token for each lexing rule that matches the input text. - Will output the tokens in the order that they appear in the input text. + Args: + input_text (str): String to lex + + Yields: + Generator[Token, None, None]: Generator of tokens """ - # There will be cases when no specific lexing rules matches. - # - # This happens when what we're looking at is just simple text with no special - # markdown meaning. - # - # Problem is: We're generally only trying to match our regex pattern against the - # prefix of what we're looking at, so if we go through all of our rules and end up - # noticing "Oh, that's just text", then we don't know how long that text segment - # is going to be. - # - # So we're going to continue scanning until we arrive at something that is not just - # text, at which point we're going to output all the text we've found as a single - # text token. seen_simple_text = "" + custom = custom or {} - while True: - if len(input_text) == 0: - if len(seen_simple_text) > 0: - yield Token(TokenType.TEXT_INLINE, seen_simple_text) - return - - for rule in lexing_rules: - match = re.match(rule.pattern, input_text) + while input_text: + for rule in itertools.chain(*custom.values(), LexingRule): + match = rule(input_text) if match is not None: matching_rule = rule break @@ -84,37 +83,10 @@ def lex(input_text: str) -> Generator[Token, None, None]: # yield inline text if we have some left if len(seen_simple_text) > 0: - yield Token(TokenType.TEXT_INLINE, seen_simple_text) + yield Token(seen_simple_text) seen_simple_text = "" - groups = None - if len(match.groups()) > 0: - groups = match.groups() - - yield Token(matching_rule.token_type, match[0], groups) - - -# stolen from https://www.urlregex.com/ -URL_REGEX = ( - r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+" -) - -lexing_rules = [ - LexingRule(token_type=TokenType.USER_MENTION, pattern="<@!?([0-9]+)>"), - LexingRule(token_type=TokenType.ROLE_MENTION, pattern="<@&([0-9]+)>"), - LexingRule(token_type=TokenType.CHANNEL_MENTION, pattern="<#([0-9]+)>"), - LexingRule( - token_type=TokenType.EMOJI_CUSTOM, pattern="<:([a-zA-Z0-9_]{2,}):([0-9]+)>" - ), - LexingRule(token_type=TokenType.EMOJI_UNICODE_ENCODED, pattern=":([a-zA-Z0-9_]+):"), - LexingRule(token_type=TokenType.URL_WITHOUT_PREVIEW, pattern=f"<{URL_REGEX}>"), - LexingRule(token_type=TokenType.URL_WITH_PREVIEW, pattern=URL_REGEX), - LexingRule(token_type=TokenType.QUOTE_LINE_PREFIX, pattern=r"(>>)?> "), - LexingRule(token_type=TokenType.TILDE, pattern=r"~"), - LexingRule(token_type=TokenType.STAR, pattern=r"\*"), - LexingRule(token_type=TokenType.UNDERSCORE, pattern=r"_"), - LexingRule(token_type=TokenType.SPOILER_DELIMITER, pattern=r"\|\|"), - LexingRule(token_type=TokenType.CODE_BLOCK_DELIMITER, pattern=r"```"), - LexingRule(token_type=TokenType.CODE_INLINE_DELIMITER, pattern=r"`"), - LexingRule(token_type=TokenType.NEWLINE, pattern="\n"), -] + yield Token(match[0], matching_rule, match.groups()) + + if len(seen_simple_text) > 0: + yield Token(seen_simple_text) diff --git a/discord_markdown_ast_parser/parser.py b/discord_markdown_ast_parser/parser.py index d870d17..5386727 100644 --- 
a/discord_markdown_ast_parser/parser.py +++ b/discord_markdown_ast_parser/parser.py @@ -1,67 +1,90 @@ import re -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum +import itertools from typing import Optional, Generator, Any, List, Dict, Tuple, Iterable -from discord_markdown_ast_parser.lexer import Token, TokenType - - -class NodeType(Enum): - TEXT = 1 - ITALIC = 2 - BOLD = 3 - UNDERLINE = 4 - STRIKETHROUGH = 5 - SPOILER = 6 - USER = 7 - ROLE = 8 - CHANNEL = 9 - EMOJI_CUSTOM = 10 - EMOJI_UNICODE_ENCODED = 11 - URL_WITH_PREVIEW = 12 - URL_WITHOUT_PREVIEW = 13 - QUOTE_BLOCK = 14 - CODE_BLOCK = 15 - CODE_INLINE = 16 +from .lexer import Token, LexingRule, Lexing + + +NodeType = Enum( + "NodeType", + [ + "TEXT", + "ITALIC", + "BOLD", + "UNDERLINE", + "STRIKETHROUGH", + "SPOILER", + "USER", + "ROLE", + "CHANNEL", + "SLASH_COMMAND", + "EMOJI_CUSTOM", + "EMOJI_CUSTOM_ANIMATED", + "EMOJI_UNICODE", + "EMOJI_UNICODE_ENCODED", + "URL_WITH_PREVIEW_EMBEDDED", + "URL_WITHOUT_PREVIEW_EMBEDDED", + "URL_WITH_PREVIEW", + "URL_WITHOUT_PREVIEW", + "TIMESTAMP", + "QUOTE_BLOCK", + "CODE_BLOCK", + "CODE_INLINE", + "CUSTOM", + ], + start=1, +) + +# format: delimiter, type +DEFAULT_MODIFIERS = [ + ([[LexingRule.STAR, LexingRule.STAR]], NodeType.BOLD), + ([[LexingRule.UNDERSCORE, LexingRule.UNDERSCORE]], NodeType.UNDERLINE), + ([[LexingRule.TILDE, LexingRule.TILDE]], NodeType.STRIKETHROUGH), + ([[LexingRule.STAR]], NodeType.ITALIC), + ([[LexingRule.UNDERSCORE]], NodeType.ITALIC), + ([[LexingRule.SPOILER_DELIMITER]], NodeType.SPOILER), + ([[LexingRule.CODE_INLINE_DELIMITER]], NodeType.CODE_INLINE), +] +LANG_SPEC = re.compile(r"([a-zA-Z0-9-]*)(.*)") @dataclass class Node: - node_type: NodeType - - # only set on TEXT type - text_content: Optional[str] = None - - discord_id: Optional[int] = None - emoji_name: Optional[str] = None + node_type: NodeType = NodeType.TEXT + content: Optional[str] = None + id: Optional[int] = None code_lang: Optional[str] = None url: Optional[str] = None + children: List["Node"] = field(default_factory=list) - # set on everything but TEXT type - # some node types always have exactly one child - children: Optional[List["Node"]] = None + def __post_init__(self): + self.children = self.children or [] def to_dict(self) -> Dict[str, Any]: # copy all properties that are not None self_dict = {k: v for k, v in self.__dict__.items() if v is not None} # convert NodeType to string - self_dict["node_type"] = self.node_type.name + self_dict["node_type"] = self.node_type if isinstance(self.node_type, str) else self.node_type.name # recursively convert children to dict - if self.children is not None: + if self.children: self_dict["children"] = [node.to_dict() for node in self.children] return self_dict -def parse_tokens(tokens: List[Token]) -> List[Node]: +def parse_tokens( + tokens: List[Token], custom: Dict[str, List[Lexing]] = None +) -> List[Node]: """ This is a temporary workaround to combat a shortcoming of parse_tokens_generator. The interesting code is in parse_tokens_generator. You will find a description of this shortcoming in a comment at the end of parse_tokens_generator. 
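    For example (an illustrative sketch using this package's own lex function):
    lex("ab\n") produces a TEXT_INLINE token "ab" followed by a NEWLINE token "\n";
    parse_tokens_generator turns both into TEXT nodes, and merge_text_nodes then
    collapses them into a single TEXT node with content "ab\n".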
""" - return merge_text_nodes(parse_tokens_generator(tokens)) + return merge_text_nodes(parse_tokens_generator(tokens, custom=custom)) def merge_text_nodes(subtree: Iterable[Node]) -> List[Node]: @@ -78,12 +101,12 @@ def merge_text_nodes(subtree: Iterable[Node]) -> List[Node]: if prev_text_node is None: prev_text_node = node else: - prev_text_node.text_content += node.text_content + prev_text_node.content += node.content continue # don't store this node else: prev_text_node = None - if node.children is not None: + if node.children: node.children = merge_text_nodes(node.children) compressed_tree.append(node) @@ -92,7 +115,7 @@ def merge_text_nodes(subtree: Iterable[Node]) -> List[Node]: def parse_tokens_generator( - tokens: List[Token], in_quote=False + tokens: List[Token], in_quote: bool = False, custom: Dict[str, List[Lexing]] = None, ) -> Generator[Node, None, None]: """ Scans the lexed tokens and identifies more complex and possibly nested structures @@ -104,6 +127,7 @@ def parse_tokens_generator( Keep in mind, however, that these nodes may have deeply nested children nodes which won't appear on the root level. """ + custom = custom if custom is not None else {} i = 0 while i < len(tokens): current_token = tokens[i] @@ -112,57 +136,134 @@ def parse_tokens_generator( # just continue once any of them match # text - if current_token.token_type == TokenType.TEXT_INLINE: - yield Node(NodeType.TEXT, text_content=current_token.value) + if LexingRule.TEXT_INLINE in current_token: + yield Node(NodeType.TEXT, content=current_token.value) i += 1 continue # user mentions - if current_token.token_type == TokenType.USER_MENTION: - yield Node(NodeType.USER, discord_id=int(current_token.groups[0])) + if LexingRule.USER_MENTION in current_token: + yield Node(NodeType.USER, id=int(current_token.groups[0]), content=current_token.value) i += 1 continue # role mentions - if current_token.token_type == TokenType.ROLE_MENTION: - yield Node(NodeType.ROLE, discord_id=int(current_token.groups[0])) + if LexingRule.ROLE_MENTION in current_token: + yield Node(NodeType.ROLE, id=int(current_token.groups[0]), content=current_token.value) + i += 1 + continue + + # unix timestamps + if LexingRule.TIMESTAMP in current_token: + yield Node( + NodeType.TIMESTAMP, + id=int(current_token.groups[0]), + code_lang=current_token.groups[1], + content=current_token.value, + ) i += 1 continue # channel mentions - if current_token.token_type == TokenType.CHANNEL_MENTION: - yield Node(NodeType.CHANNEL, discord_id=int(current_token.groups[0])) + if LexingRule.CHANNEL_MENTION in current_token: + yield Node(NodeType.CHANNEL, id=int(current_token.groups[0]), content=current_token.value) + i += 1 + continue + + # slash commands + if LexingRule.SLASH_COMMAND_MENTION in current_token: + yield Node( + NodeType.SLASH_COMMAND, + code_lang=current_token.groups[0], + id=int(current_token.groups[1]), + content=current_token.value, + ) i += 1 continue # custom emoji - if current_token.token_type == TokenType.EMOJI_CUSTOM: + if LexingRule.EMOJI_CUSTOM in current_token: + emoji_id = int(current_token.groups[1]) yield Node( NodeType.EMOJI_CUSTOM, - discord_id=int(current_token.groups[1]), - emoji_name=current_token.groups[0], + id=emoji_id, + content=current_token.value, + code_lang=current_token.groups[0], + url=f"https://cdn.discordapp.com/emojis/{emoji_id}.png" + ) + i += 1 + continue + + # custom animated emoji + if LexingRule.EMOJI_CUSTOM_ANIMATED in current_token: + emoji_id = int(current_token.groups[1]) + yield Node( + 
NodeType.EMOJI_CUSTOM_ANIMATED, + id=emoji_id, + code_lang=current_token.groups[0], + content=current_token.value, + url=f"https://cdn.discordapp.com/emojis/{emoji_id}.gif" + ) + i += 1 + continue + + # unicode emoji (when it's written as unicode) + if LexingRule.EMOJI_UNICODE in current_token: + emoji = current_token.groups[0][0] + yield Node( + NodeType.EMOJI_UNICODE, + content=emoji, + id=ord(emoji), + url=f"https://emoji.fileformat.info/png/{ord(emoji):x}.png" ) i += 1 continue # unicode emoji (when it's encoded as :name: and not just written as unicode) - if current_token.token_type == TokenType.EMOJI_UNICODE_ENCODED: + if LexingRule.EMOJI_UNICODE_ENCODED in current_token: yield Node( NodeType.EMOJI_UNICODE_ENCODED, - emoji_name=current_token.groups[0], + content=current_token.value, + code_lang=current_token.groups[0], + ) + i += 1 + continue + + # URL with preview embedded + if LexingRule.URL_WITH_PREVIEW_EMBEDDED in current_token: + yield Node( + NodeType.URL_WITH_PREVIEW_EMBEDDED, + url=current_token.groups[1], + code_lang=current_token.groups[0], + content=current_token.value, ) i += 1 continue + # URL without preview + if LexingRule.URL_WITHOUT_PREVIEW_EMBEDDED in current_token: + yield Node( + NodeType.URL_WITHOUT_PREVIEW_EMBEDDED, + url=current_token.groups[1], + code_lang=current_token.groups[0], + content=current_token.value, + ) + i += 1 + continue + # URL with preview - if current_token.token_type == TokenType.URL_WITH_PREVIEW: - yield Node(NodeType.URL_WITH_PREVIEW, url=current_token.value) + if LexingRule.URL_WITH_PREVIEW in current_token: + yield Node( + NodeType.URL_WITH_PREVIEW, + url=current_token.value, + content=current_token.value, + ) i += 1 continue # URL without preview - if current_token.token_type == TokenType.URL_WITHOUT_PREVIEW: - yield Node(NodeType.URL_WITHOUT_PREVIEW, url=current_token.value[1:-1]) + if LexingRule.URL_WITHOUT_PREVIEW in current_token: + yield Node(NodeType.URL_WITHOUT_PREVIEW, url=current_token.value[1:-1], content=current_token.value) i += 1 continue @@ -181,21 +282,11 @@ def parse_tokens_generator( # known issue: # we don't account for the fact that spoilers can't wrap code blocks - # format: delimiter, type - text_modifiers = [ - ([TokenType.STAR, TokenType.STAR], NodeType.BOLD), - ([TokenType.UNDERSCORE, TokenType.UNDERSCORE], NodeType.UNDERLINE), - ([TokenType.TILDE, TokenType.TILDE], NodeType.STRIKETHROUGH), - ([TokenType.STAR], NodeType.ITALIC), - ([TokenType.UNDERSCORE], NodeType.ITALIC), - ([TokenType.SPOILER_DELIMITER], NodeType.SPOILER), - ([TokenType.CODE_INLINE_DELIMITER], NodeType.CODE_INLINE), - ] - + text_modifiers = [([[v[0]], [v[-1]]], k) for k, v in custom.items() if k and v] node, amount_consumed_tokens = None, None - for delimiter, node_type in text_modifiers: + for delimiter, node_type in itertools.chain(text_modifiers, DEFAULT_MODIFIERS): node, amount_consumed_tokens = try_parse_node_with_children( - tokens[i:], delimiter, delimiter, node_type, in_quote + tokens[i:], delimiter[0], delimiter[-1], node_type, in_quote, custom=custom ) if node is not None: break @@ -222,9 +313,9 @@ def parse_tokens_generator( # is, in HTML, test
        # and not
        # <code>
        # test
        # </code>
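        # (illustrative example, not from the original source: the message
        # "```py\nx = 1\n```" lexes into CODE_BLOCK_DELIMITER, TEXT_INLINE "py",
        # NEWLINE, TEXT_INLINE "x = 1", NEWLINE, CODE_BLOCK_DELIMITER, and the
        # code below turns that into a CODE_BLOCK node with code_lang "py" and
        # content "x = 1\n", i.e., the lang spec and its newline are stripped)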
-        if current_token.token_type == TokenType.CODE_BLOCK_DELIMITER:
+        if LexingRule.CODE_BLOCK_DELIMITER in current_token:
            children_token, amount_consumed_tokens = search_for_closer(
-                tokens[i + 1 :], [TokenType.CODE_BLOCK_DELIMITER]
+                tokens[i + 1 :], [current_token.lexing_rule]
            )
            if children_token is not None:
                children_content = ""
@@ -243,7 +334,7 @@
                        non_empty_line_found = True
                        break
                if non_empty_line_found:
-                    match = re.fullmatch(r"([a-zA-Z0-9-]*)(.*)", lines[0])
+                    match = LANG_SPEC.fullmatch(lines[0])
                    # if there is anything behind the lang spec, then it is normal text
                    # otherwise, it is either a lang spec (gets removed from the
                    # displayed text) or it is empty (newline gets removed)
@@ -253,8 +344,9 @@
                        lang = match[1]
                children_content = "\n".join(lines)

-                child_node = Node(NodeType.TEXT, text_content=children_content)
-                yield Node(NodeType.CODE_BLOCK, code_lang=lang, children=[child_node])
+                yield Node(
+                    NodeType.CODE_BLOCK, code_lang=lang, content=children_content
+                )
                i += 1 + amount_consumed_tokens
                continue
@@ -267,6 +359,7 @@
        # - quote blocks can't be nested. any quote delimiters inside a quote block
        #   are just inline text. all other elements can appear inside a quote block
        # - text modifiers
+
        children_token_in_quote_block = []
        # note that in_quote won't change during the while-loop, we're just reducing
        # the level of indentation here by including it in the condition instead of
@@ -274,11 +367,11 @@
        while (
            not in_quote
            and i < len(tokens)
-            and tokens[i].token_type == TokenType.QUOTE_LINE_PREFIX
+            and LexingRule.QUOTE_LINE_PREFIX in tokens[i]
        ):
            # scan until next newline
            for j in range(i, len(tokens)):
-                if tokens[j].token_type == TokenType.NEWLINE:
+                if LexingRule.NEWLINE in tokens[j]:
                    # add everything from the quote line prefix (non-inclusive)
                    # to the newline (inclusive) as children token
                    children_token_in_quote_block.extend(tokens[i + 1 : j + 1])
@@ -294,9 +387,12 @@
        if len(children_token_in_quote_block) > 0:
            # tell the inner parse function that it's now inside a quote block
            children_nodes = list(
-                parse_tokens_generator(children_token_in_quote_block, in_quote=True)
+                parse_tokens_generator(
+                    children_token_in_quote_block, in_quote=True, custom=custom
+                )
            )
-            yield Node(NodeType.QUOTE_BLOCK, children=children_nodes)
+            content = "".join([token.value for token in children_token_in_quote_block])
+            yield Node(NodeType.QUOTE_BLOCK, children=children_nodes, content=content)
            continue

        # if we get all the way here, then whatever token we're currently sitting on
@@ -323,10 +419,11 @@
def try_parse_node_with_children(
    tokens: List[Token],
-    opener: List[TokenType],
-    closer: List[TokenType],
+    opener: List[LexingRule],
+    closer: List[LexingRule],
    node_type: NodeType,
    in_quote: bool,
+    custom: Optional[Dict[str, List[str]]] = None,
) -> Tuple[Optional[Node], Optional[int]]:
    """
    Tries to identify a node at the start of the specified sequence of tokens by
@@ -351,12 +448,13 @@
    # check if the opener matches
    for opener_index in range(len(opener)):
-        if tokens[opener_index].token_type != opener[opener_index]:
+        if opener[opener_index] not in tokens[opener_index]:
            return None, None

    # try finding the matching closer and consume as few tokens as possible
    # (skip the first token as that has to be a child token)
    # TODO: edge case ***bold and italic*** doesn't work
+
    children_token, amount_consumed_tokens =
search_for_closer( tokens[len(opener) + 1 :], closer ) @@ -372,14 +470,17 @@ def try_parse_node_with_children( return ( Node( node_type, - children=list(parse_tokens_generator(children_token, in_quote)), + children=list( + parse_tokens_generator(children_token, in_quote, custom=custom) + ), + content="".join(token.value for token in children_token), ), amount_consumed_tokens, ) def search_for_closer( - tokens: List[Token], closer: List[TokenType] + tokens: List[Token], closer: List[Lexing] ) -> Tuple[Optional[List[Token]], Optional[int]]: """ Searches for a specified closing sequence in the supplied list of tokens. @@ -394,7 +495,7 @@ def search_for_closer( for token_index in range(len(tokens) - len(closer) + 1): # try matching the closer to the current position by iterating over the closer for closer_index in range(len(closer)): - if tokens[token_index + closer_index].token_type != closer[closer_index]: + if closer[closer_index] not in tokens[token_index + closer_index]: break else: # closer matched @@ -402,4 +503,4 @@ def search_for_closer( # closer didn't match, try next token_index # closer was not found - return None + return None, None diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 8663acd..0000000 --- a/poetry.lock +++ /dev/null @@ -1,452 +0,0 @@ -[[package]] -name = "aiohttp" -version = "3.7.4.post0" -description = "Async http client/server framework (asyncio)" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -async-timeout = ">=3.0,<4.0" -attrs = ">=17.3.0" -chardet = ">=2.0,<5.0" -multidict = ">=4.5,<7.0" -typing-extensions = ">=3.6.5" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["aiodns", "brotlipy", "cchardet"] - -[[package]] -name = "aiohttp-cors" -version = "0.7.0" -description = "CORS support for aiohttp" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -aiohttp = ">=1.1" - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "async-timeout" -version = "3.0.1" -description = "Timeout context manager for asyncio programs" -category = "dev" -optional = false -python-versions = ">=3.5.3" - -[[package]] -name = "attrs" -version = "21.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] - -[[package]] -name = "black" -version = "21.7b0" -description = "The uncompromising code formatter." 
-category = "dev" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -aiohttp = {version = ">=3.6.0", optional = true, markers = "extra == \"d\""} -aiohttp-cors = {version = ">=0.4.0", optional = true, markers = "extra == \"d\""} -appdirs = "*" -click = ">=7.1.2" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.8.1,<1" -regex = ">=2020.1.8" -tomli = ">=0.2.6,<2.0.0" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] -python2 = ["typed-ast (>=1.4.2)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "click" -version = "8.0.1" -description = "Composable command line interface toolkit" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.4" -description = "Cross-platform colored terminal text." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "idna" -version = "3.2" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "multidict" -version = "5.1.0" -description = "multidict implementation" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "mypy" -version = "0.910" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -toml = "*" -typing-extensions = ">=3.7.4" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<1.5.0)"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "pathspec" -version = "0.9.0" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "regex" -version = "2021.8.28" -description = "Alternative regular expression module, to replace re." 
-category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "1.2.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "typing-extensions" -version = "3.10.0.0" -description = "Backported and Experimental Type Hints for Python 3.5+" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "yarl" -version = "1.6.3" -description = "Yet another URL library" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "1.1" -python-versions = "^3.8" -content-hash = "8a412767941780bf114dd46f5539a473d119437e496264ce255a02d71d41cefa" - -[metadata.files] -aiohttp = [ - {file = "aiohttp-3.7.4.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-win32.whl", hash = "sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287"}, - {file = "aiohttp-3.7.4.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8"}, - {file = "aiohttp-3.7.4.post0-cp37-cp37m-win32.whl", hash = "sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f"}, - {file = 
"aiohttp-3.7.4.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-win32.whl", hash = "sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16"}, - {file = "aiohttp-3.7.4.post0-cp38-cp38-win_amd64.whl", hash = "sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-win32.whl", hash = "sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9"}, - {file = "aiohttp-3.7.4.post0-cp39-cp39-win_amd64.whl", hash = "sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe"}, - {file = "aiohttp-3.7.4.post0.tar.gz", hash = "sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf"}, -] -aiohttp-cors = [ - {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, - {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -async-timeout = [ - {file = "async-timeout-3.0.1.tar.gz", hash = 
"sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, - {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, -] -attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -black = [ - {file = "black-21.7b0-py3-none-any.whl", hash = "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116"}, - {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, -] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, -] -click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -idna = [ - {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, - {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, -] -multidict = [ - {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, - {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, - {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, - {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, - {file = 
"multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, - {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, - {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, - {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, - {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, - {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, - {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, - {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, - {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, - {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, -] 
-mypy = [ - {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, - {file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"}, - {file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"}, - {file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"}, - {file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"}, - {file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"}, - {file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"}, - {file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"}, - {file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"}, - {file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"}, - {file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"}, - {file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"}, - {file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"}, - {file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"}, - {file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"}, - {file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"}, - {file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"}, - {file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"}, - {file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"}, - {file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"}, - {file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"}, - {file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"}, - {file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = 
"sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -regex = [ - {file = "regex-2021.8.28-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2"}, - {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a"}, - {file = "regex-2021.8.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0"}, - {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb"}, - {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a"}, - {file = "regex-2021.8.28-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308"}, - {file = "regex-2021.8.28-cp310-cp310-win32.whl", hash = "sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed"}, - {file = "regex-2021.8.28-cp310-cp310-win_amd64.whl", hash = "sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8"}, - {file = "regex-2021.8.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c"}, - {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c"}, - {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13"}, - {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0"}, - {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1"}, - {file = "regex-2021.8.28-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f"}, - {file = "regex-2021.8.28-cp36-cp36m-win32.whl", hash = "sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354"}, - {file = "regex-2021.8.28-cp36-cp36m-win_amd64.whl", hash = "sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645"}, - {file = "regex-2021.8.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a"}, - {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e"}, - {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892"}, - {file = 
"regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791"}, - {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759"}, - {file = "regex-2021.8.28-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906"}, - {file = "regex-2021.8.28-cp37-cp37m-win32.whl", hash = "sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a"}, - {file = "regex-2021.8.28-cp37-cp37m-win_amd64.whl", hash = "sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc"}, - {file = "regex-2021.8.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd"}, - {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797"}, - {file = "regex-2021.8.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f"}, - {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256"}, - {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b"}, - {file = "regex-2021.8.28-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e"}, - {file = "regex-2021.8.28-cp38-cp38-win32.whl", hash = "sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d"}, - {file = "regex-2021.8.28-cp38-cp38-win_amd64.whl", hash = "sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2"}, - {file = "regex-2021.8.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468"}, - {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb"}, - {file = "regex-2021.8.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d"}, - {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983"}, - {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8"}, - {file = "regex-2021.8.28-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed"}, - {file = "regex-2021.8.28-cp39-cp39-win32.whl", hash = "sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374"}, - {file = "regex-2021.8.28-cp39-cp39-win_amd64.whl", hash = 
"sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73"}, - {file = "regex-2021.8.28.tar.gz", hash = "sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"}, - {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"}, -] -typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, -] -yarl = [ - {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, - {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, - {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, - {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, - {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, - {file = 
"yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, - {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, - {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, - {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, - {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, - {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, - {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, - {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, -]