Skip to content

Commit

Permalink
Merge pull request #18 from ykshatroff/#17-support-single-colons
Browse files Browse the repository at this point in the history
#17. Support single quotes for v-* and :* directives
  • Loading branch information
nonamenix authored Nov 19, 2017
2 parents 4b00921 + b090eb5 commit c7e25c7
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 4 deletions.
6 changes: 3 additions & 3 deletions babelvueextractor/lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
V_DIRECTIVE_PREFIX = 'v-'
COLON_DIRECTIVE_PREFIX = '\x3a' # 0x3a = ":"

tag_re = re.compile('(%s.*?%s|%s.*?%s|%s.*?%s|%s.*?%s|%s.*?%s|(?:%s|%s).+?=".*?")' % (
tag_re = re.compile('(%s.*?%s|%s.*?%s|%s.*?%s|%s.*?%s|%s.*?%s|(?:%s|%s).+?=(?:".*?"|\'.*?\'))' % (
re.escape(CONST_START), re.escape(CONST_END),
re.escape(RAW_HTML_TAG_START), re.escape(RAW_HTML_TAG_END),
re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
Expand Down Expand Up @@ -131,10 +131,10 @@ def create_token(self, token_string, in_tag):
_end = len(VARIABLE_TAG_END)
token_type = TOKEN_VAR

elif token_string.endswith('"'):
elif token_string.endswith(('"', "'")):
token_type = TOKEN_DIRECTIVE
# eg. v-text="attr" => ['v-text=', 'attr', '']
content = token_string.split('"')[1]
content = token_string.split(token_string[-1])[1]

if _start is not None:
content = token_string[_start:-_end].strip()
Expand Down
30 changes: 29 additions & 1 deletion babelvueextractor/tests/test_lexer.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import unittest
from babelvueextractor.lexer import Lexer, Token, TOKEN_TEXT, TOKEN_VAR, TOKEN_COMMENT, TOKEN_RAW_HTML, TOKEN_CONST, \
TOKEN_DOUBLE_WAY_BINDING
TOKEN_DOUBLE_WAY_BINDING, TOKEN_DIRECTIVE


class TestLexer(unittest.TestCase):
Expand Down Expand Up @@ -52,6 +52,34 @@ def test_double_way_binding(self):
Token(TOKEN_DOUBLE_WAY_BINDING, "foo")
])

def test_v_attr(self):
    """A v-* directive's value is extracted whether quoted with ' or "."""
    single_quoted = "<div v-html='foo'>"
    expected_single = [
        Token(token_type=0, contents="<div "),
        Token(TOKEN_DIRECTIVE, "foo"),
    ]
    self.assertTokensEqual(Lexer(single_quoted).tokenize(), expected_single)

    double_quoted = '<div v-html="bar">'
    expected_double = [
        Token(token_type=0, contents="<div "),
        Token(TOKEN_DIRECTIVE, "bar"),
    ]
    self.assertTokensEqual(Lexer(double_quoted).tokenize(), expected_double)

def test_colon_attr(self):
    """A :* (shorthand-bound) attribute's value is extracted for both quote styles."""
    single_quoted = "<div :html='foo'>"
    expected_single = [
        Token(token_type=0, contents="<div "),
        Token(TOKEN_DIRECTIVE, "foo"),
    ]
    self.assertTokensEqual(Lexer(single_quoted).tokenize(), expected_single)

    double_quoted = '<div :html="bar">'
    expected_double = [
        Token(token_type=0, contents="<div "),
        Token(TOKEN_DIRECTIVE, "bar"),
    ]
    self.assertTokensEqual(Lexer(double_quoted).tokenize(), expected_double)

def test_combine(self):
content = "<div>{{ gettext('Hello') }}</div>" \
"<div>{{* gettext('Hello') }}</div>" \
Expand Down

0 comments on commit c7e25c7

Please sign in to comment.