Skip to content

Commit

Permalink
Merge pull request #5 from furious-luke/master
Browse files Browse the repository at this point in the history
Processing entry-point dependencies.
  • Loading branch information
j0hnsmith committed Apr 28, 2016
2 parents 48697db + 85c6a05 commit 9472928
Showing 1 changed file with 60 additions and 0 deletions.
60 changes: 60 additions & 0 deletions pipeline_browserify/compiler.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from pipeline.compilers import SubProcessCompiler
from os.path import dirname
import json
from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation

class BrowserifyCompiler(SubProcessCompiler):
output_extension = 'browserified.js'
Expand All @@ -23,3 +25,61 @@ def compile_file(self, infile, outfile, outdated=False, force=False):
)
print('\ncommand:', command)
return self.execute_command(command.split(), cwd=dirname(infile))

def is_outdated(self, infile, outfile):
    """Check if the input file or any of its dependencies is outdated.

    The default implementation only looks at the entry-point file, so a
    file that is ``require``d from it will not trigger a recompile when
    modified.  This override fixes that by running browserify with the
    ``--deps`` option to list every required file that the storage can
    resolve, then checking each of those against the output file.

    WARNING: Generating the dependency list may take about as long as an
    actual compile, in which case forcing a compile every time would be
    just as cheap.

    Args:
        infile: Storage-relative path of the entry-point file.
        outfile: Storage-relative path of the compiled output.

    Returns:
        True if the output is missing or older than the entry point or
        any of its dependencies, False otherwise.
    """

    # Missing output file, or a modified entry-point file, is always
    # outdated -- no need to compute dependencies in that case.
    if super(BrowserifyCompiler, self).is_outdated(infile, outfile):
        return True

    # Reuse the dependency list from a previous call if available.
    # Compare against None (not truthiness) so that a legitimately empty
    # dependency list is cached too, instead of re-running the expensive
    # browserify subprocess on every call.
    deps = getattr(self, '_deps', None)
    if deps is None:

        # Same command as the compile step, but with --deps so browserify
        # emits JSON describing every required file instead of a bundle.
        command = "%s %s %s --deps %s" % (
            getattr(settings, 'PIPELINE_BROWSERIFY_VARS', ''),
            getattr(settings, 'PIPELINE_BROWSERIFY_BINARY', '/usr/bin/env browserify'),
            getattr(settings, 'PIPELINE_BROWSERIFY_ARGUMENTS', ''),
            self.storage.path(infile),
        )
        dep_json = self.execute_command(command)

        # The output is JSON: each entry's "file" field holds the path of
        # a required file.  Keep only the files the storage can resolve;
        # paths outside the storage root raise SuspiciousFileOperation
        # and are skipped (whether the file exists or not is irrelevant
        # here -- the later is_outdated check handles missing files).
        deps = []
        for dep in json.loads(dep_json.decode()):
            try:
                self.storage.exists(dep['file'])
            except SuspiciousFileOperation:
                continue
            deps.append(dep['file'])

        # Cache the dependency list for subsequent calls in this process.
        # NOTE(review): the cache is never invalidated, so a newly added
        # require() is only picked up after a restart -- TODO confirm
        # this is acceptable for the deployment workflow.
        self._deps = deps

    # Outdated if any dependency is newer than the compiled output.
    for dep in deps:
        if super(BrowserifyCompiler, self).is_outdated(dep, outfile):
            return True

    return False

0 comments on commit 9472928

Please sign in to comment.