diff --git a/.travis.yml b/.travis.yml index 39763896..83963b26 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: java sudo: false # faster builds jdk: -# - oraclejdk7 - oraclejdk8 cache: directories: diff --git a/pom.xml b/pom.xml index f284eb1c..f5a2b100 100644 --- a/pom.xml +++ b/pom.xml @@ -2,7 +2,7 @@ 4.0.0 net.alchim31.maven scala-maven-plugin - 3.4.7-SNAPSHOT + 4.0.0-SNAPSHOT maven-plugin scala-maven-plugin @@ -99,8 +99,8 @@ UTF-8 github - 1.6 - 1.6 + 1.8 + 1.8 3.0 3.3.9 UTF-8 @@ -218,10 +218,14 @@ - com.typesafe.zinc - zinc - 0.3.15 - compile + org.scala-sbt + zinc_2.12 + 1.2.5 + + + org.scala-lang.modules + scala-java8-compat_2.12 + 0.9.0 @@ -276,6 +280,13 @@ maven-site-plugin 3.7.1 + + + org.apache.bcel + bcel + 6.3 + + maven-jxr-plugin @@ -348,7 +359,6 @@ 3.8.0 -Xlint:deprecation - 1.6 @@ -389,7 +399,7 @@ org.codehaus.mojo.signature - java15 + java18 1.0 @@ -577,8 +587,8 @@ 2.7.7 2.8.2 - 2.12.6 - 2.12.6 + 2.12.8 + 2.12.8 2.12 2.10.2-SNAPSHOT org.scala-lang.macro-paradise diff --git a/src/main/java/sbt_inc/SbtIncrementalCompiler.java b/src/main/java/sbt_inc/SbtIncrementalCompiler.java index 4b4e997a..5b2c7dc7 100644 --- a/src/main/java/sbt_inc/SbtIncrementalCompiler.java +++ b/src/main/java/sbt_inc/SbtIncrementalCompiler.java @@ -1,178 +1,140 @@ package sbt_inc; -import com.typesafe.zinc.Compiler; -import com.typesafe.zinc.*; +import scala.compat.java8.functionConverterImpls.*; + import org.apache.maven.plugin.logging.Log; -import org.apache.maven.toolchain.Toolchain; +import sbt.internal.inc.*; +import sbt.internal.inc.FileAnalysisStore; +import sbt.internal.inc.ScalaInstance; +import sbt.internal.inc.classpath.ClasspathUtilities; import scala.Option; -import scala_maven_executions.MainHelper; -import util.JavaLocator; +import scala_maven.VersionNumber; +import xsbti.Logger; +import xsbti.T2; +import xsbti.compile.*; +import xsbti.compile.AnalysisStore; +import xsbti.compile.CompilerCache; import java.io.File; +import java.net.URL; 
+import java.net.URLClassLoader; import java.util.ArrayList; import java.util.List; -import java.util.Map; +import java.util.Optional; +import java.util.function.Function; public class SbtIncrementalCompiler { - public static final String SBT_GROUP_ID = "com.typesafe.sbt"; - public static final String COMPILER_INTEGRATION_ARTIFACT_ID = "incremental-compiler"; - public static final String COMPILER_INTERFACE_ARTIFACT_ID = "compiler-interface"; - public static final String COMPILER_INTERFACE_CLASSIFIER = "sources"; - public static final String XSBTI_ARTIFACT_ID = "sbt-interface"; - - private static final String ANALYSIS_MAP_ARG_SEPARATOR = ","; - private static final String ANALYSIS_MAP_PAIR_SEPARATOR = File.pathSeparator; - - private Log log; - - private ZincClient zinc; - - private boolean useServer = false; - - private File compilerJar; - - private File libraryJar; - - private List extraJars; - - private List extraArgs; - - private xsbti.Logger logger; - - private Compiler compiler; - - public SbtIncrementalCompiler(boolean useZincServer, String zincHost, int zincPort, File libraryJar, File compilerJar, List extraJars, File xsbtiJar, File interfaceJar, Log l, List args) throws Exception { - this.log = l; - if (useZincServer) { - this.zinc = new ZincClient(zincHost, zincPort); - if (zinc.serverAvailable()) { - l.info("Using zinc server for incremental compilation"); - this.useServer = true; - this.compilerJar = compilerJar; - this.libraryJar = libraryJar; - this.extraJars = extraJars; - this.extraArgs = args; - } else { - l.warn("Zinc server is not available at port " + zincPort + " - reverting to normal incremental compile"); - this.useServer = false; + public static final String SBT_GROUP_ID = "org.scala-sbt"; + public static final String ZINC_ARTIFACT_ID = "zinc"; + public static final String COMPILER_BRIDGE_ARTIFACT_ID = "compiler-bridge"; + + private final Logger logger; + private final IncrementalCompilerImpl compiler; + private final Compilers compilers; + 
private final Setup setup; + private final AnalysisStore analysisStore; + + public SbtIncrementalCompiler(File libraryJar, File reflectJar, File compilerJar, VersionNumber scalaVersion, List extraJars, File compilerBridgeJar, Log l, List args, File cacheFile) throws Exception { + l.info("Using incremental compilation"); + if (args.size() > 0) l.warn("extra args for zinc are ignored in non-server mode"); + this.logger = new SbtLogger(l); + + List allJars = new ArrayList<>(extraJars); + allJars.add(libraryJar); + allJars.add(reflectJar); + allJars.add(compilerJar); + + ScalaInstance scalaInstance = new ScalaInstance( + scalaVersion.toString(), // version + new URLClassLoader(new URL[]{libraryJar.toURI().toURL(), reflectJar.toURI().toURL(), compilerJar.toURI().toURL()}), // loader + ClasspathUtilities.rootLoader(), // loaderLibraryOnly + libraryJar, // libraryJar + compilerJar, // compilerJar + allJars.toArray(new File[]{}), // allJars + Option.apply(scalaVersion.toString()) // explicitActual + ); + + compiler = new IncrementalCompilerImpl(); + + AnalyzingCompiler scalaCompiler = new AnalyzingCompiler( + scalaInstance, // scalaInstance + ZincCompilerUtil.constantBridgeProvider(scalaInstance, compilerBridgeJar), //provider + ClasspathOptionsUtil.auto(), // classpathOptions + new FromJavaConsumer<>(noop -> { + }), //FIXME foo -> {}, // onArgsHandler + Option.apply(null) // classLoaderCache + ); + + compilers = compiler.compilers(scalaInstance, ClasspathOptionsUtil.boot(), Option.apply(null), scalaCompiler); + + PerClasspathEntryLookup lookup = new PerClasspathEntryLookup() { + @Override + public Optional analysis(File classpathEntry) { + return Optional.empty(); } - } - if (!useServer) { - l.info("Using incremental compilation"); - if (args.size() > 0) l.warn("extra args for zinc are ignored in non-server mode"); - this.logger = new SbtLogger(l); - Setup setup = Setup.create(compilerJar, libraryJar, extraJars, xsbtiJar, interfaceJar, null, false); - if 
(l.isDebugEnabled()) Setup.debug(setup, logger); - this.compiler = Compiler.create(setup, logger); - } - } - - private IncOptions defaultOptions() { - sbt.inc.IncOptions defaultSbtOptions = sbt.inc.IncOptions.Default(); - return new IncOptions( - defaultSbtOptions.transitiveStep(), - defaultSbtOptions.recompileAllFraction(), - defaultSbtOptions.relationsDebug(), - defaultSbtOptions.apiDebug(), - defaultSbtOptions.apiDiffContextSize(), - defaultSbtOptions.apiDumpDirectory(), - false, - Option.empty(), - defaultSbtOptions.recompileOnMacroDef(), - defaultSbtOptions.nameHashing()); - } - public void compile(File baseDir, List classpathElements, List sources, File classesDirectory, List scalacOptions, List javacOptions, File cacheFile, Map cacheMap, String compileOrder, Toolchain toolchain) throws Exception { - if (useServer) { - zincCompile(baseDir, classpathElements, sources, classesDirectory, scalacOptions, javacOptions, cacheFile, cacheMap, compileOrder, toolchain); - } else { - if (log.isDebugEnabled()) log.debug("Incremental compiler = " + compiler + " [" + Integer.toHexString(compiler.hashCode()) + "]"); - List classpath = pathsToFiles(classpathElements); - Inputs inputs = Inputs.create(classpath, sources, classesDirectory, scalacOptions, javacOptions, cacheFile, cacheMap, compileOrder, defaultOptions(), true); - if (log.isDebugEnabled()) Inputs.debug(inputs, logger); - compiler.compile(inputs, logger); - } - } - - private void zincCompile(File baseDir, List classpathElements, List sources, File classesDirectory, List scalacOptions, List javacOptions, File cacheFile, Map cacheMap, String compileOrder, Toolchain toolchain) throws Exception { - List arguments = new ArrayList(extraArgs); - arguments.add("-log-level"); - arguments.add(logLevelToString(log)); - arguments.add("-scala-compiler"); - arguments.add(compilerJar.getAbsolutePath()); - arguments.add("-scala-library"); - arguments.add(libraryJar.getAbsolutePath()); - arguments.add("-scala-extra"); - List 
extraPaths = new ArrayList(); - for (File extraJar : extraJars) { - extraPaths.add(extraJar.getAbsolutePath()); - } - arguments.add(MainHelper.toMultiPath(extraPaths)); - if (!classpathElements.isEmpty()) { - arguments.add("-classpath"); - arguments.add(MainHelper.toMultiPath(classpathElements)); - } - arguments.add("-d"); - arguments.add(classesDirectory.getAbsolutePath()); - for (String scalacOption : scalacOptions) { - arguments.add("-S" + scalacOption); - } - - String javaHome = JavaLocator.findHomeFromToolchain(toolchain); - if (javaHome != null) { - log.info("Toolchain in scala-maven-plugin: " + javaHome); - arguments.add("-java-home"); - arguments.add(javaHome); - } - - for (String javacOption : javacOptions) { - arguments.add("-C" + javacOption); - } - arguments.add("-compile-order"); - arguments.add(compileOrder); - arguments.add("-analysis-cache"); - arguments.add(cacheFile.getAbsolutePath()); - arguments.add("-analysis-map"); - arguments.add(cacheMapToString(cacheMap)); - for (File source : sources) { - arguments.add(source.getAbsolutePath()); - } - - int exitCode = zinc.run(arguments, baseDir, System.out, System.err); - - if (exitCode != 0) { - xsbti.Problem[] problems = null; - throw new sbt.compiler.CompileFailed(arguments.toArray(new String[arguments.size()]), "Compile failed via zinc server", problems); - } + @Override + public DefinesClass definesClass(File classpathEntry) { + return Locate.definesClass(classpathEntry); + } + }; + + LoggedReporter reporter = new LoggedReporter(100, logger, pos -> pos); + + analysisStore = AnalysisStore.getCachedStore(FileAnalysisStore.binary(cacheFile)); + + setup = + compiler.setup( + lookup, // lookup + false, // skip + cacheFile, // cacheFile + CompilerCache.fresh(), // cache + IncOptions.of(), // incOptions + reporter, // reporter + Option.apply(null), // optionProgress + new T2[]{} + ); } - private List pathsToFiles(List paths) { - List files = new ArrayList(paths.size()); - for (String path : paths) { - 
files.add(new File(path)); + public void compile(List classpathElements, List sources, File classesDirectory, List scalacOptions, List javacOptions, String compileOrder) { + + Inputs inputs = compiler.inputs( + classpathElements.stream().map(File::new).toArray(size -> new File[size]), //classpath + sources.toArray(new File[]{}), // sources + classesDirectory, // classesDirectory + scalacOptions.toArray(new String[]{}), // scalacOptions + javacOptions.toArray(new String[]{}), // javacOptions + 100, // maxErrors + new Function[]{}, // sourcePositionMappers + toCompileOrder(compileOrder), // order + compilers, + setup, + compiler.emptyPreviousResult() + ); + + Optional analysisContents = analysisStore.get(); + if (analysisContents.isPresent()) { + AnalysisContents analysisContents0 = analysisContents.get(); + CompileAnalysis previousAnalysis = analysisContents0.getAnalysis(); + MiniSetup previousSetup = analysisContents0.getMiniSetup(); + PreviousResult previousResult = PreviousResult.of(Optional.of(previousAnalysis), Optional.of(previousSetup)); + inputs = inputs.withPreviousResult(previousResult); } - return files; - } - private String logLevelToString(Log l) { - if (l.isDebugEnabled()) return "debug"; - else if (l.isInfoEnabled()) return "info"; - else if (l.isWarnEnabled()) return "warn"; - else if (l.isErrorEnabled()) return "error"; - else return "info"; + CompileResult newResult = compiler.compile(inputs, logger); + analysisStore.set(AnalysisContents.create(newResult.analysis(), newResult.setup())); } - private String cacheMapToString(Map cacheMap) throws Exception { - String analysisMap = ""; - boolean addArgSeparator = false; - for (Map.Entry entry : cacheMap.entrySet()) { - if (addArgSeparator) analysisMap += ANALYSIS_MAP_ARG_SEPARATOR; - analysisMap += entry.getKey().getAbsolutePath(); - analysisMap += ANALYSIS_MAP_PAIR_SEPARATOR; - analysisMap += entry.getValue().getAbsolutePath(); - addArgSeparator = true; + private CompileOrder toCompileOrder(String 
name) { + if (name.equalsIgnoreCase(CompileOrder.Mixed.name())) { + return CompileOrder.Mixed; + } else if (name.equalsIgnoreCase(CompileOrder.JavaThenScala.name())) { + return CompileOrder.JavaThenScala; + } else if (name.equalsIgnoreCase(CompileOrder.ScalaThenJava.name())) { + return CompileOrder.ScalaThenJava; + } else { + throw new IllegalArgumentException("Unknown compileOrder: " + name); } - return analysisMap; } } diff --git a/src/main/java/sbt_inc/SbtLogger.java b/src/main/java/sbt_inc/SbtLogger.java index 5c5e7bdf..2ad3ee3a 100644 --- a/src/main/java/sbt_inc/SbtLogger.java +++ b/src/main/java/sbt_inc/SbtLogger.java @@ -1,49 +1,43 @@ package sbt_inc; import org.apache.maven.plugin.logging.Log; -import xsbti.F0; -import xsbti.Logger; +import sbt.util.Level; +import sbt.util.Logger; +import scala.Enumeration; +import scala.Function0; -public class SbtLogger implements Logger { +public class SbtLogger extends Logger { - Log log; + private final Log log; public SbtLogger(Log l) { this.log = l; } @Override - public void error(F0 msg) { - if (log.isErrorEnabled()) { - log.error(msg.apply()); - } - } - - @Override - public void warn(F0 msg) { - if (log.isWarnEnabled()) { - log.warn(msg.apply()); + public void trace(Function0 t) { + if (log.isDebugEnabled()) { + log.debug(t.apply()); } } @Override - public void info(F0 msg) { + public void success(Function0 message) { if (log.isInfoEnabled()) { - log.info(msg.apply()); + log.info("Success: " + message.apply()); } } @Override - public void debug(F0 msg) { - if (log.isDebugEnabled()) { - log.debug(msg.apply()); - } - } - - @Override - public void trace(F0 exception) { - if (log.isDebugEnabled()) { - log.debug(exception.apply()); + public void log(Enumeration.Value level, Function0 message) { + if (level.equals(Level.Error())) { + log.error(message.apply()); + } else if (level.equals(Level.Warn())) { + log.warn(message.apply()); + } else if (level.equals(Level.Info())) { + log.info(message.apply()); + } else if 
(level.equals(Level.Debug())) { + log.debug(message.apply()); } } } diff --git a/src/main/java/scala_maven/ScalaCompilerSupport.java b/src/main/java/scala_maven/ScalaCompilerSupport.java index e96ae18b..5828ab03 100644 --- a/src/main/java/scala_maven/ScalaCompilerSupport.java +++ b/src/main/java/scala_maven/ScalaCompilerSupport.java @@ -3,17 +3,11 @@ import java.io.File; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.LinkedList; import java.util.List; -import java.util.Map; -import org.apache.maven.model.Plugin; -import org.apache.maven.plugin.descriptor.PluginDescriptor; import org.apache.maven.plugins.annotations.Parameter; -import org.apache.maven.project.MavenProject; import org.codehaus.plexus.util.StringUtils; -import org.codehaus.plexus.util.xml.Xpp3Dom; import sbt_inc.SbtIncrementalCompiler; import scala_maven_executions.JavaMainCaller; @@ -77,24 +71,6 @@ public abstract class ScalaCompilerSupport extends ScalaSourceMojoSupport { @Parameter(property = "compileOrder", defaultValue = "mixed") private String compileOrder; - /** - * Use zinc server for incremental recompilation. - */ - @Parameter(property = "useZincServer", defaultValue = "false") - private boolean useZincServer; - - /** - * Zinc server port, if running with incremental zinc server mode. - */ - @Parameter(property = "zincPort", defaultValue = "3030") - private int zincPort; - - /** - * Zinc server host, if running with incremental zinc server mode. Defaults to 127.0.0.1. 
- */ - @Parameter(property = "zincHost", defaultValue = "127.0.0.1") - private String zincHost; - /** * Additional parameter to use to call zinc server * It is a pipe '|' separated list of arguments, so it can be used from command @@ -128,7 +104,7 @@ protected void doExecute() throws Exception { protected int compile(List sourceRootDirs, File outputDir, File analysisCacheFile, List classpathElements, boolean compileInLoop) throws Exception, InterruptedException { if (!compileInLoop && INCREMENTAL.equals(recompileMode)) { - // TODO - Do we really need this dupliated here? + // TODO - Do we really need this duplicated here? if (!outputDir.exists()) { outputDir.mkdirs(); } @@ -209,7 +185,7 @@ protected List getFilesToCompile(List sourceRootDirs, long lastSucce // failed with "error while loading Xxx, class file '.../target/classes/.../Xxxx.class' is broken" // (restore how it work in 2.11 and failed in 2.12) //TODO a better behavior : if there is at least one .scala to compile then add all .java, if there is at least one .java then add all .scala (because we don't manage class dependency) - List files = new ArrayList(sourceFiles.size()); + List files = new ArrayList<>(sourceFiles.size()); if (_lastCompileAt > 0 || (!ALL.equals(recompileMode) && (lastSuccessfullCompileTime > 0))) { ArrayList modifiedScalaFiles = new ArrayList(sourceFiles.size()); ArrayList modifiedJavaFiles = new ArrayList(sourceFiles.size()); @@ -289,7 +265,7 @@ void setLastSuccessfullTS(long v) throws Exception { // @SuppressWarnings("unchecked") - protected int incrementalCompile(List classpathElements, List sourceRootDirs, File outputDir, File cacheFile, boolean compileInLoop) throws Exception, InterruptedException { + protected int incrementalCompile(List classpathElements, List sourceRootDirs, File outputDir, File cacheFile, boolean compileInLoop) throws Exception { List sources = findSourceWithFilters(sourceRootDirs); if (sources.isEmpty()) { return -1; @@ -297,27 +273,21 @@ protected int 
incrementalCompile(List classpathElements, List sour if (incremental == null) { File libraryJar = getLibraryJar(); + File reflectJar = getReflectJar(); File compilerJar = getCompilerJar(); List extraJars = getCompilerDependencies(); extraJars.remove(libraryJar); - String sbtGroupId = SbtIncrementalCompiler.SBT_GROUP_ID; - String xsbtiArtifactId = SbtIncrementalCompiler.XSBTI_ARTIFACT_ID; - String compilerInterfaceArtifactId = SbtIncrementalCompiler.COMPILER_INTERFACE_ARTIFACT_ID; - String compilerInterfaceClassifier = SbtIncrementalCompiler.COMPILER_INTERFACE_CLASSIFIER; - String sbtVersion = findVersionFromPluginArtifacts(sbtGroupId, SbtIncrementalCompiler.COMPILER_INTEGRATION_ARTIFACT_ID); - File xsbtiJar = getPluginArtifactJar(sbtGroupId, xsbtiArtifactId, sbtVersion); - List zincArgs = StringUtils.isEmpty(addZincArgs) ? new LinkedList() : (List) Arrays.asList(StringUtils.split(addZincArgs, "|")); - File interfaceSrcJar = getPluginArtifactJar(sbtGroupId, compilerInterfaceArtifactId, sbtVersion, compilerInterfaceClassifier); - incremental = new SbtIncrementalCompiler(useZincServer, zincHost, zincPort, libraryJar, compilerJar, extraJars, xsbtiJar, interfaceSrcJar, getLog(), zincArgs); + File compilerBridgeJar = getCompilerBridgeJar(); + List zincArgs = StringUtils.isEmpty(addZincArgs) ? 
new LinkedList<>() : Arrays.asList(StringUtils.split(addZincArgs, "|")); + incremental = new SbtIncrementalCompiler(libraryJar, reflectJar, compilerJar, findScalaVersion(), extraJars, compilerBridgeJar, getLog(), zincArgs, cacheFile); } classpathElements.remove(outputDir.getAbsolutePath()); List scalacOptions = getScalaOptions(); List javacOptions = getJavacOptions(); - Map cacheMap = getAnalysisCacheMap(); try { - incremental.compile(project.getBasedir(), classpathElements, sources, outputDir, scalacOptions, javacOptions, cacheFile, cacheMap, compileOrder, toolchainManager.getToolchainFromBuildContext("jdk", session)); + incremental.compile(classpathElements, sources, outputDir, scalacOptions, javacOptions, compileOrder); } catch (xsbti.CompileFailed e) { if (compileInLoop) { compileErrors = true; @@ -328,36 +298,4 @@ protected int incrementalCompile(List classpathElements, List sour return 1; } - - protected Map getAnalysisCacheMap() { - HashMap map = new HashMap(); - String scalaPluginKey = ((PluginDescriptor) getPluginContext().get("pluginDescriptor")).getPluginLookupKey(); - for (MavenProject project1 : reactorProjects) { - Plugin plugin = project1.getPlugin(scalaPluginKey); - if (plugin != null) { - Xpp3Dom configuration = (Xpp3Dom) plugin.getConfiguration(); - Xpp3Dom analysisCache = (configuration != null) ? configuration.getChild("analysisCacheFile") : null; - File analysisCacheFile = (analysisCache != null) ? new File(analysisCache.getValue()) : defaultAnalysisCacheFile(project1); - File classesDirectory = new File(project1.getBuild().getOutputDirectory()); - map.put(classesDirectory.getAbsoluteFile(), analysisCacheFile.getAbsoluteFile()); - Xpp3Dom testAnalysisCache = (configuration != null) ? configuration.getChild("testAnalysisCacheFile") : null; - File testAnalysisCacheFile = (testAnalysisCache != null) ? 
new File(testAnalysisCache.getValue()) : defaultTestAnalysisCacheFile(project1); - File testClassesDirectory = new File(project1.getBuild().getTestOutputDirectory()); - map.put(testClassesDirectory.getAbsoluteFile(), testAnalysisCacheFile.getAbsoluteFile()); - } - } - return map; - } - - protected File defaultAnalysisDirectory(MavenProject p) { - return new File(p.getBuild().getDirectory(), "analysis"); - } - - protected File defaultAnalysisCacheFile(MavenProject p) { - return new File(defaultAnalysisDirectory(p), "compile"); - } - - protected File defaultTestAnalysisCacheFile(MavenProject p) { - return new File(defaultAnalysisDirectory(p), "test-compile"); - } } diff --git a/src/main/java/scala_maven/ScalaMojoSupport.java b/src/main/java/scala_maven/ScalaMojoSupport.java index 4e32de96..23a7228f 100644 --- a/src/main/java/scala_maven/ScalaMojoSupport.java +++ b/src/main/java/scala_maven/ScalaMojoSupport.java @@ -41,6 +41,7 @@ import org.apache.maven.toolchain.ToolchainManager; import org.codehaus.plexus.util.StringUtils; +import sbt_inc.SbtIncrementalCompiler; import scala_maven_dependency.CheckScalaVersionVisitor; import scala_maven_dependency.ScalaDistroArtifactFilter; import scala_maven_executions.JavaMainCaller; @@ -51,6 +52,7 @@ public abstract class ScalaMojoSupport extends AbstractMojo { public static final String SCALA_LIBRARY_ARTIFACTID = "scala-library"; + public static final String SCALA_REFLECT_ARTIFACTID = "scala-reflect"; public static final String SCALA_COMPILER_ARTIFACTID = "scala-compiler"; /** @@ -314,7 +316,7 @@ public String getScalaOrganization() { * => getAbsolutePath) * * @see https://github.com/davidB/maven-scala-plugin/issues/50 + * "https://github.com/davidB/scala-maven-plugin/issues/50">https://github.com/davidB/scala-maven-plugin/issues/50 */ @Parameter(property = "maven.scala.useCanonicalPath", defaultValue = "true") protected boolean useCanonicalPath = true; @@ -447,7 +449,7 
@@ protected final Set resolveDependencyArtifacts(final Artifact artifact * @param collectionFilter an {@link ArtifactFilter} used to determine which * members dependency graph should be downloaded. * @param remoteRepositories a {@link List} of remote {@link - * ArtifactRespository} values to used for dependency resolution of + * ArtifactRepository} values to use for dependency resolution of * the provided {@link Artifact}. * @param localRepository the local {@link ArtifactRepository} to use for * dependency resolution of the given {@link Artifact}. @@ -481,7 +483,7 @@ protected final Set resolveDependencyArtifacts(final Artifact artifact * @param collectionFilter an {@link ArtifactFilter} used to determine which * members dependency graph should be downloaded. * @param remoteRepositories a {@link List} of remote {@link - * ArtifactRespository} values to used for dependency resolution of + * ArtifactRepository} values to use for dependency resolution of * the provided {@link Artifact}. * @param localRepository the local {@link ArtifactRepository} to use for * dependency resolution of the given {@link Artifact}. 
@@ -863,19 +865,33 @@ protected List getJavacOptions() throws Exception { protected File getLibraryJar() throws Exception { if (StringUtils.isNotEmpty(scalaHome)) { File lib = new File(scalaHome, "lib"); - return new File(lib, "scala-library.jar"); + return new File(lib, SCALA_LIBRARY_ARTIFACTID + ".jar"); } return getArtifactJar(getScalaOrganization(), SCALA_LIBRARY_ARTIFACTID, findScalaVersion().toString()); } + protected File getReflectJar() throws Exception { + if (StringUtils.isNotEmpty(scalaHome)) { + File lib = new File(scalaHome, "lib"); + return new File(lib, SCALA_REFLECT_ARTIFACTID + ".jar"); + } + return getArtifactJar(getScalaOrganization(), SCALA_REFLECT_ARTIFACTID, findScalaVersion().toString()); + } + protected File getCompilerJar() throws Exception { if(StringUtils.isNotEmpty(scalaHome)) { File lib = new File(scalaHome, "lib"); - return new File(lib, "scala-compiler.jar"); + return new File(lib, SCALA_COMPILER_ARTIFACTID + ".jar"); } return getArtifactJar(getScalaOrganization(), SCALA_COMPILER_ARTIFACTID, findScalaVersion().toString()); } + protected File getCompilerBridgeJar() throws Exception { + VersionNumber scalaVersion = findScalaVersion(); + String zincVersion = findVersionFromPluginArtifacts(SbtIncrementalCompiler.SBT_GROUP_ID, scalaVersion.applyScalaArtifactVersioningScheme(SbtIncrementalCompiler.ZINC_ARTIFACT_ID)); + return getArtifactJar(SbtIncrementalCompiler.SBT_GROUP_ID, scalaVersion.applyScalaArtifactVersioningScheme(SbtIncrementalCompiler.COMPILER_BRIDGE_ARTIFACT_ID), zincVersion); + } + protected List getCompilerDependencies() throws Exception { List d = new ArrayList(); if(StringUtils.isEmpty(scalaHome)) { diff --git a/src/main/java/scala_maven/VersionNumber.java b/src/main/java/scala_maven/VersionNumber.java index 551c5522..e20e4c5c 100644 --- a/src/main/java/scala_maven/VersionNumber.java +++ b/src/main/java/scala_maven/VersionNumber.java @@ -78,6 +78,9 @@ public boolean isZero() { return (major == 0) && (minor == 0) && (bugfix 
== 0); } + public String applyScalaArtifactVersioningScheme(String name) { + return name + '_' + major + '.' + minor; + } } class VersionNumberMask extends VersionNumber {