Processing entry-point dependencies. #5

Merged 5 commits on Apr 28, 2016
60 changes: 60 additions & 0 deletions pipeline_browserify/compiler.py
@@ -1,6 +1,8 @@
from pipeline.compilers import SubProcessCompiler
from os.path import dirname
import json
from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation

class BrowserifyCompiler(SubProcessCompiler):
    output_extension = 'browserified.js'
@@ -23,3 +25,61 @@ def compile_file(self, infile, outfile, outdated=False, force=False):
        )
        print('\ncommand:', command)
        return self.execute_command(command.split(), cwd=dirname(infile))

    def is_outdated(self, infile, outfile):
        """Check whether the input file or any of its dependencies is outdated.

        The difficulty with the default implementation is that a file that is
        `require`d from the entry-point file will not trigger a recompile if it
        is modified. This overridden version of the method corrects this by
        generating a list of all required files that are also managed by the
        storage and checking whether they've been modified since the last
        compile.

        The command used to generate the list of dependencies is the same as
        the compile command, but with the `--deps` option added.

        WARNING: Generating the dependencies may take about as long as actually
        compiling, in which case we would be better off simply forcing a
        compile every time.
        """

        # Check for missing file or modified entry-point file.
        if super(BrowserifyCompiler, self).is_outdated(infile, outfile):
            return True

        # Check if we've already calculated dependencies.
        deps = getattr(self, '_deps', None)
        if not deps:

            # Collect dependency information.
            command = "%s %s %s --deps %s" % (
                getattr(settings, 'PIPELINE_BROWSERIFY_VARS', ''),
                getattr(settings, 'PIPELINE_BROWSERIFY_BINARY', '/usr/bin/env browserify'),
                getattr(settings, 'PIPELINE_BROWSERIFY_ARGUMENTS', ''),
                self.storage.path(infile),
            )
            dep_json = self.execute_command(command)  #, cwd=dirname(infile))

            # Process the output. It's JSON: each record carries the file's
            # path under the "file" field (and its content under "source",
            # which isn't needed here since modification times are checked
            # instead).
            deps = []
            for dep in json.loads(dep_json.decode()):

                # Is this file managed by the storage? Paths outside the
                # storage root raise SuspiciousFileOperation and are skipped;
                # a True/False answer means the path is at least resolvable.
                try:
                    exists = self.storage.exists(dep['file'])
                except SuspiciousFileOperation:
                    exists = None
                if exists is not None:
                    deps.append(dep['file'])

            # Cache the dependencies for the next possible run.
            self._deps = deps

        # Test the dependencies to see if they're out of date.
        for dep in deps:
            if super(BrowserifyCompiler, self).is_outdated(dep, outfile):
                return True

        return False
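
For reference, the settings the compiler reads via `getattr(settings, ...)` can be defined in the project's Django settings module. A minimal sketch: the setting names come from the diff above, but the values shown are purely illustrative assumptions.

# settings.py (illustrative values; only the setting names are taken from the diff)
PIPELINE_BROWSERIFY_BINARY = '/usr/bin/env browserify'   # default used when unset
PIPELINE_BROWSERIFY_ARGUMENTS = '-t babelify'            # assumed example transform
PIPELINE_BROWSERIFY_VARS = 'NODE_ENV=production'         # assumed env prefix for the command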
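
A rough sketch of what the `--deps` output looks like and how the loop in `is_outdated()` reduces it to a list of file paths, assuming a typical browserify run; the paths and sources below are made up.

import json

# Trimmed, illustrative example of the JSON emitted by `browserify --deps`:
dep_json = b'''[
  {"id": "/app/static/js/app.js", "file": "/app/static/js/app.js", "entry": true,
   "deps": {"./util": "/app/static/js/util.js"},
   "source": "var util = require('./util');"},
  {"id": "/app/static/js/util.js", "file": "/app/static/js/util.js",
   "deps": {}, "source": "module.exports = function () {};"}
]'''

# is_outdated() keeps only the "file" paths the storage can resolve, then
# re-checks each of them against the compiled output file.
files = [dep['file'] for dep in json.loads(dep_json.decode())]
# -> ['/app/static/js/app.js', '/app/static/js/util.js']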