Add sourcemaps on ClosureCompilerFilter #815

Closed
36 changes: 30 additions & 6 deletions compressor/base.py
@@ -1,7 +1,9 @@
from __future__ import with_statement, unicode_literals
import base64
import os
import codecs
from importlib import import_module
import re

from django.core.files.base import ContentFile
from django.utils.safestring import mark_safe
@@ -289,16 +291,21 @@ def output(self, mode='file', forced=False):
any custom modification. Calls other mode specific methods or simply
returns the content directly.
"""
output = '\n'.join(self.filter_input(forced))

if not output:
outputs = self.filter_input(forced)
if settings.COMPRESS_OFFLINE_GROUP_FILES:
outputs = ['\n'.join(outputs)]

if not outputs:
return ''

if settings.COMPRESS_ENABLED or forced:
filtered_output = self.filter_output(output)
return self.handle_output(mode, filtered_output, forced)

return output
filtered_outputs = []
for output in outputs:
filtered_output = self.filter_output(output)
filtered_outputs.append(self.handle_output(mode, filtered_output, forced))
outputs = filtered_outputs
return '\n'.join(outputs)

def handle_output(self, mode, content, forced, basename=None):
# Then check for the appropriate output method and call it
@@ -315,6 +322,23 @@ def output_file(self, mode, content, forced=False, basename=None):
the appropriate template with the file's URL.
"""
new_filepath = self.get_filepath(content, basename=basename)
if settings.COMPRESS_OFFLINE_SOURCEMAPS_ON_FILES:
new_map_filepath = '%s.map' % new_filepath

# Match an inline base64 source map in either a line comment
# (//# sourceMappingURL=...) or a block comment (/*# sourceMappingURL=... */).
source_mapping_url_re = r'(//[#@] *sourceMappingURL=data:application/json;base64,' \
r'(([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?))|' \
r'(/\*[#@] *sourceMappingURL=data:application/json;base64,' \
r'(([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?)) *\*/'
m = re.search(source_mapping_url_re, content)
if m:
b64_map = m.group(2) if m.group(2) else m.group(6)
map_data = base64.standard_b64decode(b64_map)
if not self.storage.exists(new_map_filepath) or forced:
# The decoded map is already a bytestring, so it can be saved as-is.
self.storage.save(new_map_filepath, ContentFile(map_data))
content = re.sub(
source_mapping_url_re,
'//# sourceMappingURL=%s' % os.path.basename(new_map_filepath),
content)
if not self.storage.exists(new_filepath) or forced:
self.storage.save(new_filepath, ContentFile(content.encode(self.charset)))
url = mark_safe(self.storage.url(new_filepath))
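
For readers following the output_file() change above, here is a minimal standalone sketch of the same inline-source-map extraction. The helper name, the simplified regex (line-comment form only), and the file naming are illustrative assumptions, not the exact code in this patch.

import base64
import os
import re

# Simplified pattern: only the //# sourceMappingURL=data:...;base64,<map> form.
SOURCE_MAPPING_URL_RE = (
    r'//[#@] *sourceMappingURL=data:application/json;base64,'
    r'(?P<map>[A-Za-z0-9+/]+={0,2})'
)


def split_inline_source_map(content, output_name):
    """Return (rewritten content, decoded map bytes or None)."""
    match = re.search(SOURCE_MAPPING_URL_RE, content)
    if match is None:
        return content, None
    map_bytes = base64.standard_b64decode(match.group('map'))
    map_name = '%s.map' % os.path.basename(output_name)
    rewritten = re.sub(
        SOURCE_MAPPING_URL_RE,
        '//# sourceMappingURL=%s' % map_name,
        content)
    return rewritten, map_bytes

output_file() then saves the decoded map next to the compressed file and serves the rewritten content, which now references the external .map file instead of carrying the map inline.
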
4 changes: 4 additions & 0 deletions compressor/cache.py
@@ -133,6 +133,8 @@ def get_precompiler_cachekey(command, contents):


def cache_get(key):
if settings.COMPRESS_OFFLINE_GROUP_FILES:
key = 'G|%s' % key
packed_val = cache.get(key)
if packed_val is None:
return None
@@ -147,6 +149,8 @@ def cache_get(key):


def cache_set(key, val, refreshed=False, timeout=None):
if settings.COMPRESS_OFFLINE_GROUP_FILES:
key = 'G|%s' % key
if timeout is None:
timeout = settings.COMPRESS_REBUILD_TIMEOUT
refresh_time = timeout + time.time()
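
The 'G|' prefix above namespaces cache entries by grouping mode, so toggling COMPRESS_OFFLINE_GROUP_FILES does not serve results produced under the other mode. A rough sketch of the idea (prefix_key is an illustrative helper, not part of the patch):

def prefix_key(key, group_files):
    # Grouped and ungrouped compression produce different outputs for the
    # same source content, so they must not share cache entries.
    return 'G|%s' % key if group_files else key
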
5 changes: 5 additions & 0 deletions compressor/conf.py
@@ -39,6 +39,7 @@ class CompressorConf(AppConf):
CACHEABLE_PRECOMPILERS = ()
CLOSURE_COMPILER_BINARY = 'java -jar compiler.jar'
CLOSURE_COMPILER_ARGUMENTS = ''
CLOSURE_COMPILER_SOURCEMAPS = False
YUI_BINARY = 'java -jar yuicompressor.jar'
YUI_CSS_ARGUMENTS = ''
YUI_JS_ARGUMENTS = ''
@@ -68,6 +69,10 @@ class CompressorConf(AppConf):
OFFLINE_CONTEXT = {}
# The name of the manifest file (e.g. filename.ext)
OFFLINE_MANIFEST = 'manifest.json'
# If False, the compressed files are not grouped into a single output file
OFFLINE_GROUP_FILES = True
# If True, write the Base64-encoded source maps to separate .map files
OFFLINE_SOURCEMAPS_ON_FILES = False
# The Context to be used when TemplateFilter is used
TEMPLATE_FILTER_CONTEXT = {}
# Placeholder to be used instead of settings.COMPRESS_URL during offline compression.
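
Putting the new settings together, a settings.py that compiles each file separately with Closure and writes external source maps might look roughly like this; the values and the filter setting name (which varies across django-compressor versions) are assumptions for illustration:

# settings.py (sketch)
COMPRESS_ENABLED = True

# One compressed file (and one <script>/<link> tag) per source file
# instead of a single grouped bundle.
COMPRESS_OFFLINE_GROUP_FILES = False

# Ask the Closure Compiler to emit a source map and inline it as base64.
COMPRESS_CLOSURE_COMPILER_SOURCEMAPS = True
COMPRESS_CLOSURE_COMPILER_BINARY = 'java -jar compiler.jar'

# Extract the inline maps into separate .map files next to the output.
COMPRESS_OFFLINE_SOURCEMAPS_ON_FILES = True

# Older releases use COMPRESS_JS_FILTERS; newer ones use COMPRESS_FILTERS.
COMPRESS_JS_FILTERS = ['compressor.filters.closure.ClosureCompilerFilter']
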
2 changes: 1 addition & 1 deletion compressor/css.py
@@ -47,5 +47,5 @@ def output(self, *args, **kwargs):
for media, subnode in self.media_nodes:
subnode.extra_context.update({'media': media})
ret.append(subnode.output(*args, **kwargs))
return ''.join(ret)
return '\n'.join(ret)
return super(CssCompressor, self).output(*args, **kwargs)
52 changes: 52 additions & 0 deletions compressor/filters/closure.py
@@ -1,5 +1,11 @@
import base64
import json
import os
from django.core.files.temp import NamedTemporaryFile
import io
from compressor.conf import settings
from compressor.filters import CompilerFilter
from compressor.exceptions import FilterError


class ClosureCompilerFilter(CompilerFilter):
@@ -8,3 +14,49 @@ class ClosureCompilerFilter(CompilerFilter):
("binary", settings.COMPRESS_CLOSURE_COMPILER_BINARY),
("args", settings.COMPRESS_CLOSURE_COMPILER_ARGUMENTS),
)
minfile = None

def __init__(self, content, **kwargs):
super(ClosureCompilerFilter, self).__init__(content, **kwargs)

if settings.COMPRESS_CLOSURE_COMPILER_SOURCEMAPS and not settings.COMPRESS_OFFLINE_GROUP_FILES:
self.command = self.command.replace('{args}', '--create_source_map {mapfile} {args}')

def input(self, **kwargs):
encoding = self.default_encoding
options = dict(self.options)

if "{mapfile}" in self.command and "mapfile" not in options:
# create temporary mapfile file if needed
ext = self.type and ".%s.map" % self.type or ""
self.minfile = NamedTemporaryFile(mode='r+', suffix=ext)
options["mapfile"] = self.minfile.name

self.options = options

filtered = super(ClosureCompilerFilter, self).input(**kwargs)

try:
mapfile_path = options.get('mapfile')
if mapfile_path:
with io.open(mapfile_path, 'r', encoding=encoding) as file:
map = file.read()

map_dict = json.loads(map)
sources = map_dict['sources']
sources[sources.index('stdin')] = kwargs['elem']['attrs_dict']['src']
map_dict['file'] = os.path.basename(kwargs['elem']['attrs_dict']['src'])
map_dict['sources'] = sources
map = json.dumps(map_dict)

filtered = '%s\n//# sourceMappingURL=data:application/json;base64,%s' % (
filtered,
base64.standard_b64encode(map.encode(encoding)).decode('ascii')  # bytes in, text out on both Python 2 and 3
)
except (IOError, OSError) as e:
raise FilterError('Unable to add source map in %s (%r): %s' %
(self.__class__.__name__, self.command, e))
finally:
if self.minfile is not None:
self.minfile.close()
return filtered
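
In effect, with source maps enabled and grouping disabled, the filter runs the compiler with --create_source_map pointed at a temporary file, then rewrites the generated map and re-inlines it into the filtered output. A condensed sketch of that rewriting step, assuming the same inputs as input() above (the helper name is illustrative):

import base64
import json
import os


def append_inline_source_map(filtered_js, map_json_text, original_src):
    """Point the Closure map back at the original file and inline it as base64."""
    map_dict = json.loads(map_json_text)
    # Closure names the piped input 'stdin'; replace it with the real source URL.
    map_dict['sources'] = [original_src if s == 'stdin' else s
                           for s in map_dict['sources']]
    map_dict['file'] = os.path.basename(original_src)
    encoded = base64.standard_b64encode(
        json.dumps(map_dict).encode('utf-8')).decode('ascii')
    return '%s\n//# sourceMappingURL=data:application/json;base64,%s' % (
        filtered_js, encoded)
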
2 changes: 2 additions & 0 deletions compressor/js.py
@@ -27,6 +27,8 @@ def split_contents(self):
extra = ' defer'
else:
extra = ''
if 'crossorigin' in attribs:
extra += ' crossorigin'
# Append to the previous node if it had the same attribute
append_to_previous = (self.extra_nodes and
self.extra_nodes[-1][0] == extra)
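
Since scripts are only concatenated with neighbours that share the same attribute set, the crossorigin check above keeps crossorigin scripts out of the bundles built for plain, async, or defer scripts. A sketch of how the grouping key is derived, mirroring split_contents() (the function name is illustrative):

def extra_attributes(attribs):
    # Build the string used to group adjacent <script> tags: tags are only
    # concatenated when this value matches the previous node's value.
    if 'async' in attribs:
        extra = ' async'
    elif 'defer' in attribs:
        extra = ' defer'
    else:
        extra = ''
    if 'crossorigin' in attribs:
        extra += ' crossorigin'
    return extra
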
89 changes: 82 additions & 7 deletions compressor/tests/test_base.py
@@ -184,6 +184,13 @@ def test_css_return_if_on(self):
output = css_tag('/static/CACHE/css/e41ba2cc6982.css')
self.assertEqual(output, self.css_node.output().strip())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_css_return_if_on_no_group(self):
output = '\n'.join([css_tag('/static/CACHE/css/cdd1a7452e1d.css'),
css_tag('/static/CACHE/css/13c6e6521347.css'),
css_tag('/static/CACHE/css/652d0fbfecf5.css')])
self.assertEqual(output, self.css_node.output().strip())

def test_js_split(self):
out = [
(
@@ -211,16 +218,37 @@ def test_js_output(self):
out = '<script type="text/javascript" src="/static/CACHE/js/d728fc7f9301.js"></script>'
self.assertEqual(out, self.js_node.output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_js_output_no_group(self):
out = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(out, self.js_node.output())

def test_js_override_url(self):
self.js_node.context.update({'url': 'This is not a url, just a text'})
out = '<script type="text/javascript" src="/static/CACHE/js/d728fc7f9301.js"></script>'
self.assertEqual(out, self.js_node.output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_js_override_url_no_group(self):
self.js_node.context.update({'url': 'This is not a url, just a text'})
out = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(out, self.js_node.output())

def test_css_override_url(self):
self.css_node.context.update({'url': 'This is not a url, just a text'})
output = css_tag('/static/CACHE/css/e41ba2cc6982.css')
self.assertEqual(output, self.css_node.output().strip())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_css_override_url_no_group(self):
self.css_node.context.update({'url': 'This is not a url, just a text'})
output = '\n'.join([css_tag('/static/CACHE/css/cdd1a7452e1d.css'),
css_tag('/static/CACHE/css/13c6e6521347.css'),
css_tag('/static/CACHE/css/652d0fbfecf5.css')])
self.assertEqual(output, self.css_node.output().strip())

@override_settings(COMPRESS_PRECOMPILERS=(), COMPRESS_ENABLED=False)
def test_js_return_if_off(self):
self.assertEqualCollapsed(self.js, self.js_node.output())
@@ -229,21 +257,45 @@ def test_js_return_if_on(self):
output = '<script type="text/javascript" src="/static/CACHE/js/d728fc7f9301.js"></script>'
self.assertEqual(output, self.js_node.output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_js_return_if_on_no_group(self):
output = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(output, self.js_node.output())

@override_settings(COMPRESS_OUTPUT_DIR='custom')
def test_custom_output_dir1(self):
output = '<script type="text/javascript" src="/static/custom/js/d728fc7f9301.js"></script>'
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_custom_output_dir1_no_group(self):
output = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_OUTPUT_DIR='')
def test_custom_output_dir2(self):
output = '<script type="text/javascript" src="/static/js/d728fc7f9301.js"></script>'
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_custom_output_dir2_no_group(self):
output = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_OUTPUT_DIR='/custom/nested/')
def test_custom_output_dir3(self):
output = '<script type="text/javascript" src="/static/custom/nested/js/d728fc7f9301.js"></script>'
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_custom_output_dir3_no_group(self):
output = '\n'.join(['<script type="text/javascript" src="/static/CACHE/js/153fcbd56af1.js"></script>',
'<script type="text/javascript" src="/static/CACHE/js/188074e83ceb.js"></script>'])
self.assertEqual(output, JsCompressor(self.js).output())

@override_settings(COMPRESS_PRECOMPILERS=(
('text/foobar', 'compressor.tests.test_base.TestPrecompiler'),
), COMPRESS_ENABLED=True)
@@ -338,18 +390,41 @@ def setUp(self):
<script type="text/javascript">obj.value = "value";</script>
<script src="/static/js/one.js" type="text/javascript" async></script>
<script src="/static/js/two.js" type="text/javascript" async></script>
<script src="/static/js/three.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>
<script src="/static/js/one.js" type="text/javascript" crossorigin></script>
<script src="/static/js/two.js" type="text/javascript" crossorigin></script>
<script src="/static/js/three.js" type="text/javascript"></script>
<script src="/static/js/one.js" type="text/javascript" crossorigin async></script>
<script src="/static/js/two.js" type="text/javascript" crossorigin></script>
<script src="/static/js/three.js" type="text/javascript"></script>"""

def _extract_attr(self, tag):
if tag.has_attr('async') and tag.has_attr('crossorigin'):
return 'crossorigin async'
if tag.has_attr('defer') and tag.has_attr('crossorigin'):
return 'crossorigin defer'
if tag.has_attr('async'):
return 'async'
if tag.has_attr('defer'):
return 'defer'
if tag.has_attr('crossorigin'):
return 'crossorigin'

def test_js_output(self):
def extract_attr(tag):
if tag.has_attr('async'):
return 'async'
if tag.has_attr('defer'):
return 'defer'
js_node = JsCompressor(self.js)
output = [None, 'async', 'defer', None, 'async', None]
output = [None, 'async', 'defer', None, 'async', None, 'crossorigin', None, 'crossorigin async', 'crossorigin', None]
scripts = make_soup(js_node.output()).find_all('script')
attrs = [self._extract_attr(s) for s in scripts]
self.assertEqual(output, attrs)

@override_settings(COMPRESS_OFFLINE_GROUP_FILES=False)
def test_js_output_no_group(self):
js_node = JsCompressor(self.js)
output = [None, 'async', 'defer', None, 'async', 'async', None, None, 'crossorigin', 'crossorigin', None,
'crossorigin async', 'crossorigin', None]
scripts = make_soup(js_node.output()).find_all('script')
attrs = [extract_attr(s) for s in scripts]
attrs = [self._extract_attr(s) for s in scripts]
self.assertEqual(output, attrs)


26 changes: 24 additions & 2 deletions compressor/tests/test_filters.py
@@ -408,14 +408,26 @@ def test_template_filter(self):
self.assertEqual(input, TemplateFilter(content).input())


class SpecializedFiltersTest(TestCase):
class ClosureFiltersTest(TestCase):
"""
Test to check the Specializations of filters.
Test to check the Closure filter.
"""
def test_closure_filter(self):
filter = ClosureCompilerFilter('')
self.assertEqual(filter.command, '{binary} {args}')
self.assertEqual(filter.options, (('binary', six.text_type('java -jar compiler.jar')), ('args', six.text_type(''))))

@override_settings(COMPRESS_CLOSURE_COMPILER_SOURCEMAPS=True, COMPRESS_OFFLINE_GROUP_FILES=False)
def test_source_map(self):
filter = ClosureCompilerFilter('')
self.assertEqual(filter.command, '{binary} --create_source_map {mapfile} {args}')
self.assertEqual(filter.options, (('binary', six.text_type('java -jar compiler.jar')), ('args', six.text_type(''))))


class YuglifyFiltersTest(TestCase):
"""
Test to check the Yuglify filter.
"""
def test_yuglify_filters(self):
filter = YUglifyCSSFilter('')
self.assertEqual(filter.command, '{binary} {args} --type=css')
@@ -425,6 +437,11 @@ def test_yuglify_filters(self):
self.assertEqual(filter.command, '{binary} {args} --type=js')
self.assertEqual(filter.options, (('binary', six.text_type('yuglify')), ('args', six.text_type('--terminal'))))


class YuiFiltersTest(TestCase):
"""
Test to check the Yui filter.
"""
def test_yui_filters(self):
filter = YUICSSFilter('')
self.assertEqual(filter.command, '{binary} {args} --type=css')
@@ -434,6 +451,11 @@ def test_yui_filters(self):
self.assertEqual(filter.command, '{binary} {args} --type=js --verbose')
self.assertEqual(filter.options, (('binary', six.text_type('java -jar yuicompressor.jar')), ('args', six.text_type('')), ('verbose', 1)))


class CleanCssFiltersTest(TestCase):
"""
Test to check the CleanCss filter.
"""
def test_clean_css_filter(self):
filter = CleanCSSFilter('')
self.assertEqual(filter.options, (('binary', six.text_type('cleancss')), ('args', six.text_type(''))))