// build.gradle for the MongoDB Connector for Hadoop
// https://github.com/mongodb/mongo-hadoop
import org.apache.tools.ant.filters.ReplaceTokens

apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'download-task'
apply plugin: 'com.github.ben-manes.versions'

ext.configDir = new File(rootDir, 'config')
ext.hadoopBinaries = "${rootDir}/hadoop-binaries".toString()
ext.javaDriverVersion = '3.2.1'
ext.hiveVersion = System.getenv("HIVE_VERSION") ?: '1.2.1'
ext.pigVersion = System.getenv("PIG_VERSION") ?: '0.15.0'
ext.hadoopVersion = System.getenv("HADOOP_VERSION") ?: '2.7.2'
ext.sparkVersion = System.getenv("SPARK_VERSION") ?: '1.6.0'
ext.scalaVersion = System.getenv("SCALA_VERSION") ?: '2.11'
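// Hadoop 1.x ships as a monolithic hadoop-core artifact; 2.x splits it into
// per-component modules, so the dependency blocks below branch on this flag.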
ext.isHadoopV1 = "$hadoopVersion".startsWith("1.")

ext.hadoopHome = System.getenv("HADOOP_HOME") ?: "${hadoopBinaries}/hadoop-${hadoopVersion}".toString()
ext.hiveHome = System.getenv("HIVE_HOME") ?: "${hadoopBinaries}/apache-hive-${hiveVersion}-bin".toString()
ext.pigHome = System.getenv("PIG_HOME") ?: "${hadoopBinaries}/pig-${pigVersion}".toString()
ext.dataHome = "${hadoopBinaries}/examples/data".toString()
ext.docsHome = "${rootDir}/docs"
ext.mongoimport = '/usr/local/bin/mongoimport'
ext.mongorestore = '/usr/local/bin/mongorestore'
ext.mongoURI = System.getenv("MONGO_INPUT_URI")
ext.mongoAuthURI = System.getenv("MONGO_AUTH_URI")
ext.mongoUsername = System.getenv("MONGO_USER")
ext.mongoPassword = System.getenv("MONGO_PASSWORD")
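// Most of the settings above read from the environment, so a build can be
// pointed at different versions without editing this file, e.g. (hypothetical
// invocation; versions shown are only illustrative):
//   HADOOP_VERSION=2.6.0 PIG_VERSION=0.15.0 ./gradlew clean test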

buildscript {
    repositories {
        mavenLocal()
        mavenCentral()
        jcenter()
        maven { url 'https://oss.sonatype.org/content/repositories/snapshots' }
    }
    dependencies {
        classpath 'org.codehaus.groovy:groovy-backports-compat23:2.3.5'
        classpath 'org.zeroturnaround:zt-exec:1.6'

        classpath 'com.bmuschko:gradle-nexus-plugin:2.1.1'
        classpath 'me.trnl:clirr-gradle-plugin:0.4'
        classpath 'de.undercouch:gradle-download-task:1.0'
        classpath 'com.github.ben-manes:gradle-versions-plugin:0.6'
    }
}

//apply from: 'gradle/maven-deployment.gradle'
apply from: 'gradle/functions.gradle'
apply from: 'gradle/hadoop.gradle'
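// functions.gradle and hadoop.gradle are expected to define the cluster
// helper tasks referenced below (startCluster, shutdownCluster,
// downloadEnronEmails).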

task wrapper(type: Wrapper) {
    gradleVersion = '2.2.1'
}

if (project.hasProperty("clusterVersion")) {
    println "The 'clusterVersion' property is obsolete. See https://jira.mongodb.org/browse/HADOOP-160 for details."
}

allprojects {
    version = '2.0.2'
    group = 'org.mongodb.mongo-hadoop'
}

configure(subprojects) {
    apply plugin: 'java'
    apply plugin: 'idea'
    apply plugin: 'eclipse'
    apply plugin: 'checkstyle'
    apply plugin: 'findbugs'
    apply plugin: 'com.bmuschko.nexus'

    repositories {
        mavenCentral()
        mavenLocal()
        maven { url "https://repository.cloudera.com/artifactory/cloudera-repos/" }
    }

    sourceCompatibility = JavaVersion.VERSION_1_6
    targetCompatibility = JavaVersion.VERSION_1_6

/*
    configurations {
        tests
        tests.extendsFrom(testCompile)

        provided
        provided.extendsFrom(compile)
    }

    sourceSets.main.compileClasspath += configurations.provided
    sourceSets.test.compileClasspath += configurations.provided
    sourceSets.test.runtimeClasspath += configurations.provided
*/

    dependencies {
        compile "org.mongodb:mongo-java-driver:${javaDriverVersion}"

        testCompile 'junit:junit:4.11'
        testCompile 'org.hamcrest:hamcrest-all:1.3'
        testCompile 'org.zeroturnaround:zt-exec:1.6'
        testCompile 'com.jayway.awaitility:awaitility:1.6.0'
        testCompile 'commons-daemon:commons-daemon:1.0.15'

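        // Hadoop 1.x bundles mini-cluster test support in hadoop-test; 2.x
        // spreads it across several "tests"-classified artifacts.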
        if (isHadoopV1) {
            testCompile "org.apache.hadoop:hadoop-test:${hadoopVersion}"
        } else {
            testCompile "org.apache.hadoop:hadoop-hdfs:${hadoopVersion}"
            testCompile "org.apache.hadoop:hadoop-hdfs:${hadoopVersion}:tests"
            testCompile "org.apache.hadoop:hadoop-common:${hadoopVersion}:tests"
            testCompile "org.apache.hadoop:hadoop-yarn-server-tests:${hadoopVersion}:tests"
            testCompile "org.apache.hadoop:hadoop-mapreduce-client-jobclient:${hadoopVersion}:tests"
        }
    }

    /* Compiling */
    tasks.withType(AbstractCompile) {
        options.encoding = 'ISO-8859-1'
        options.fork = true
        options.debug = true
        options.compilerArgs = [/*'-Xlint:deprecation',*/ '-Xlint:-options']
    }

    project.ext.buildingWith = { n ->
        project.hasProperty(n) && project.property(n).toBoolean()
    }
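    // e.g. running with -PxmlReports.enabled=true flips the FindBugs reports
    // below from HTML to XML.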

//    /* Testing */
//    task testJar(type: Jar, dependsOn: testClasses) {
//        from sourceSets.test.output
//    }

    tasks.withType(Test) {
        maxParallelForks = 1
        systemProperty 'project.version', project.version
        systemProperty 'cluster.version', hadoopVersion
        systemProperty 'hadoop.version', hadoopVersion
        systemProperty 'hive.version', hiveVersion
        systemProperty 'pig.version', pigVersion
        systemProperties << System.getProperties()

        beforeTest { descr ->
            logger.info("[Test ${descr.className} > ${descr.name}]")
        }

        finalizedBy ':shutdownCluster'
    }

    test.dependsOn ':startCluster'
    clean.dependsOn ':cleanHadoop'

    /* Code quality */
    checkstyle {
        configFile = new File("$configDir/checkstyle.xml")
    }

    checkstyleTest {
        classpath += configurations.compile
        classpath += configurations.testCompile
    }

    checkstyleMain {
        classpath += configurations.compile
    }

    task quickCheck(dependsOn: [checkstyleMain, checkstyleTest]) << {}

    findbugs {
        excludeFilter = new File("$configDir/findbugs-exclude.xml")
        sourceSets = [sourceSets.main]
    }

    tasks.withType(FindBugs) {
        reports {
            xml.enabled = project.buildingWith('xmlReports.enabled')
            html.enabled = !project.buildingWith('xmlReports.enabled')
        }
    }

    javadoc {
        options.version = true
        options.links 'http://docs.oracle.com/javase/8/docs/api/'
        options.links 'http://hadoop.apache.org/docs/r2.7.2/api'
        options.links 'http://api.mongodb.org/java/3.2/'
        if (JavaVersion.current().isJava8Compatible()) {
          options.addStringOption('Xdoclint:none', '-quiet')
        }
    }

    gradle.taskGraph.whenReady { taskGraph ->
        copyFiles.dependsOn 'jar', 'testsJar'
    }

//    artifacts {
//        tests testJar
//    }

    test {
        dependsOn 'jar', 'testsJar', ':startCluster', ':downloadEnronEmails'
    }

    modifyPom {
        project {
            name = 'MongoDB Connector for Hadoop'
            description = 'The MongoDB Connector for Hadoop is a plugin for Hadoop that provides the ability to use MongoDB as an input source and/or an output destination.'
            url 'https://github.com/mongodb/mongo-hadoop.git'

            scm {
                url 'https://github.com/mongodb/mongo-hadoop.git'
                connection 'scm:git:git://github.com/mongodb/mongo-hadoop.git'
            }

            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }

            developers {
                developer {
                    name 'Various'
                    organization = 'MongoDB'
                }
            }
        }

        whenConfigured { resultPom ->
            resultPom.dependencies.removeAll { dep -> dep.scope != 'compile' }
            resultPom.dependencies*.scope = null
        }
    }

    extraArchive {
        sources = true
        tests = true
        javadoc = true
    }

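    // Have the gradle-nexus-plugin sign published artifacts.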
    nexus {
        sign = true
    }
}

task apidocs {
    // Depends on the "javadoc" task for all projects.
    dependsOn {
        subprojects*.getTasksByName("javadoc", true)
    }
    // Exclude the example projects.
    def noExamples = subprojects.findAll {
        proj -> !proj.getName().contains("examples/")
    }
    doLast {
        // Create the docs directory if it doesn't already exist.
        def docsHomeDir = new File(docsHome)
        if (!docsHomeDir.exists()) {
            docsHomeDir.mkdir()
        }
        noExamples.each {
            proj -> copy {
                from new File(proj.docsDir, "javadoc")
                into new File(docsHomeDir, proj.getName())
            }
        }
        // Generate an index.html linking each module's javadoc.
        new File(docsHomeDir, "index.html").withWriter { writer ->
            writer.write("<html><head><title>MongoDB Hadoop Connector " +
                    "Modules</title></head><body><h3>MongoDB Hadoop Connector " +
                    "Modules</h3><ul>")
            noExamples.each { proj ->
                writer.append("<li><a href='" + proj.getName() + "/index.html' " +
                        "rel='nofollow'>" + proj.getName() + "</a></li>")
            }
            writer.append("</ul></body></html>")
        }
    }
}

idea {
    module {
        excludeDirs += file('hadoop-binaries')
    }
}

configure(subprojects.findAll { it.name.contains('examples/') }) {
    def exampleName = project.name.split('/')[1]
    jar {
        baseName = exampleName
    }
    group += ".mongo-hadoop-examples"
}

project(":core") {
    archivesBaseName = "mongo-hadoop-core"

    dependencies {
        if (isHadoopV1) {
            compile "org.apache.hadoop:hadoop-core:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-client:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-tools:${hadoopVersion}"
        } else {
            compile "org.apache.hadoop:hadoop-common:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-mapreduce-client-core:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-mapreduce-client-common:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-mapreduce-client-shuffle:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-mapreduce-client-app:${hadoopVersion}"
            compile "org.apache.hadoop:hadoop-mapreduce-client-jobclient:${hadoopVersion}"
        }
        testCompile "org.mockito:mockito-core:1.10.19"
    }
}

project(':spark') {
    apply plugin: 'scala'
    archivesBaseName = "mongo-hadoop-spark"
    dependencies {
        compile "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
        compile project(':core')
    }

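    // Build a self-contained jar: include the :core classes and unpack the
    // mongo-java-driver jar so no extra classpath entries are needed at
    // deployment time.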
    jar {
        from project(':core').sourceSets.main.output
        from sourceSets.main.output

        configurations.compile.filter {
            it.name.startsWith('mongo-java-driver')
        }.each {
            from zipTree(it)
        }
    }

    extraArchive {
        tests = false
    }

    test {
        dependsOn.remove('testsJar')
    }
}

project(':hive') {
    archivesBaseName = 'mongo-hadoop-hive'

    configurations {
        // Dependencies of hive-exec, but not available on Maven.
        all*.exclude group: 'org.pentaho', module: 'pentaho-aggdesigner-algorithm'
        all*.exclude group: 'eigenbase', module: 'eigenbase-properties'
    }

    test {
        onlyIf { JavaVersion.current().isJava7Compatible() }
    }

    dependencies {
        compile project(':core')

        compile "org.apache.hive:hive-exec:${hiveVersion}"
        compile "org.apache.hive:hive-serde:${hiveVersion}"

        testCompile "org.apache.hive:hive-cli:${hiveVersion}"
        testCompile "org.apache.hive:hive-service:${hiveVersion}"
        testCompile "org.apache.hive:hive-jdbc:${hiveVersion}"

        testCompile files(project(':core').sourceSets.test.output)

        testCompile("com.nitayjoffe.thirdparty.com.jointhegrid:hive_test:4.0.0")  {
            exclude group: 'org.apache.derby', module: 'derby'
            exclude group: 'org.apache.hadoop', module: 'hadoop-core'
        }
    }
}

project(":pig") {
    archivesBaseName = "mongo-hadoop-pig"

    dependencies {
        compile project(":core")
        if (isHadoopV1) {
            compile "org.apache.pig:pig:${pigVersion}"
        } else {
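            // The "h2" classifier picks the Pig build compiled against Hadoop 2.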
            compile "org.apache.pig:pig:${pigVersion}:h2"
        }
        compile 'joda-time:joda-time:2.7'

        testCompile files(project(':core').sourceSets.main.output)
        testCompile files(project(':core').sourceSets.test.output)
        testCompile "org.apache.pig:pigunit:${pigVersion}"
        testCompile "org.antlr:antlr:3.5.2"
        testCompile "org.mockito:mockito-core:1.10.19"
    }

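    // Replace @TOKEN@ placeholders in the test resources with concrete paths
    // and versions on every test run.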
    processTestResources {
        outputs.upToDateWhen { false }
        filter ReplaceTokens, tokens: [
                JAVA_DRIVER_JAR : project(':core').configurations.compile.find { it.name.startsWith("mongo-java-driver") }.toString(),
                PROJECT_VERSION : project(':core').version,
                PROJECT_HOME    : project.rootDir.getAbsolutePath(),
                PIG_RESOURCES   : project(':pig').sourceSets.test.output.resourcesDir.toString()
        ]
    }

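    // Same self-contained packaging as the :spark jar.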
    jar {
        from project(':core').sourceSets.main.output
        from sourceSets.main.output

        configurations.compile.filter {
            it.name.startsWith('mongo-java-driver')
        }.each {
            from zipTree(it)
        }
    }

}

project(":streaming") {
    archivesBaseName = "mongo-hadoop-streaming"

    dependencies {
        compile project(":core")
        compile "org.apache.hadoop:hadoop-streaming:${hadoopVersion}"

        testCompile "org.mockito:mockito-core:1.10.19"
    }

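    // Bundle :core and the Java driver, as in the :spark jar.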
    jar {
        from project(':core').sourceSets.main.output
        from sourceSets.main.output

        configurations.compile.filter {
            it.name.startsWith('mongo-java-driver')
        }.each {
            from zipTree(it)
        }
    }
}

project(":flume") {
    dependencies {
        compile project(":core")
        compile("com.cloudera:flume-core:0.9.4-cdh3u3") {
            exclude group: 'org.apache.hadoop', module: 'hadoop-core'
            exclude group: 'com.cloudera.cdh', module: 'hadoop-ant'
        }
    }
}

project(":examples/treasury_yield") {
    uploadArchives.onlyIf { false }
    dependencies {
        compile project(':core')

        testCompile files(project(':core').sourceSets.test.output)
        testCompile project(':streaming')
        testCompile 'org.slf4j:slf4j-jdk14:1.7.7'
    }
}

project(":examples/enron") {
    uploadArchives.onlyIf { false }
    dependencies {
        compile project(':core')
    }
}

project(":examples/enron/spark") {
    archivesBaseName = "mongo-spark-enron"

    dependencies {
        compile project(":core")
        compile project(":spark")
        compile "org.apache.spark:spark-sql_${scalaVersion}:${sparkVersion}"
    }

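    // Fat jar: this example's classes plus :spark, :core, and the Java driver.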
    jar {
        from sourceSets.main.output
        from project(':spark').sourceSets.main.output
        from project(':core').sourceSets.main.output

        configurations.compile.filter {
            it.name.startsWith('mongo-java-driver')
        }.each {
            from zipTree(it)
        }
    }
}

project(":examples/sensors") {
    uploadArchives.onlyIf { false }
    dependencies {
        compile project(":core")
    }
}

project(":examples/shakespeare") {
    uploadArchives.onlyIf { false }
    dependencies {
        compile project(":core")
    }
}

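// Clear the cluster logs, keeping the .keep placeholder.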
task cleanLogs(type: Delete) {
    delete fileTree(dir: "logs", exclude: ".keep")
}

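// Delete the unpacked Hadoop, Hive, and Pig distributions.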
task cleanHadoop(type: Delete, dependsOn: cleanLogs) {
    delete hadoopHome, hiveHome, pigHome
}