forked from cwensel/cascading.samples
-
Notifications
You must be signed in to change notification settings - Fork 31
/
Copy pathbuild.gradle
157 lines (118 loc) · 4.45 KB
/
build.gradle
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
/*
* Copyright (c) 2007-2015 Concurrent, Inc. All Rights Reserved.
*
* Project and contact information: http://www.concurrentinc.com/
*/
// Core build plugins shared by this aggregating build.
['java', 'idea', 'eclipse'].each { pluginName ->
  apply plugin: pluginName
}

// Evaluate each sample sub-project before this script, so their tasks
// (e.g. 'dist') are resolvable from the subprojects block below.
['logparser', 'loganalysis', 'wordcount', 'hadoop'].each { sample ->
  evaluationDependsOn( sample )
}
// Artifact repositories used to resolve build dependencies.
// All remote URLs use HTTPS: plain-HTTP repositories are open to
// man-in-the-middle artifact substitution and are refused by current
// Gradle and Maven Central infrastructure.
repositories {
  mavenLocal()
  mavenCentral()
  maven { url 'https://conjars.org/repo/' } // was http:// — insecure transport
  maven { url 'https://repository.apache.org/content/repositories/releases/' }
}
// Promote TeamCity-supplied project properties (a map under the 'teamcity'
// key) to JVM system properties so the rest of the build reads them uniformly.
def teamcityProperties = project.properties[ 'teamcity' ]
if( teamcityProperties )
  System.properties.putAll( teamcityProperties )

// Optionally merge AWS credentials from a properties file named via
// -Daws.properties=<path> into the system properties.
def awsPropertiesPath = System.properties[ 'aws.properties' ]
if( awsPropertiesPath )
{
  file( awsPropertiesPath ).withReader { input ->
    def loaded = new Properties()
    loaded.load( input )
    System.properties.putAll( loaded )
  }
}
// Local Hadoop installation used to execute the samples (may be unset;
// checked lazily by the execSample task in each sub-project).
def hadoopHome = System.properties[ 'hadoop.home' ]

ext {
  // Cascading release line these samples track.
  cascadingVersionMajor = '3.0.0'

  // Per-sample argument line handed to ${hadoop.home}/bin/yarn.
  commandLines = [
    logparser  : "jar logparser.jar data/apache.200.txt output",
    loganalysis: "jar loganalysis.jar data/apache.200.txt output",
    wordcount  : "jar wordcount.jar data/url+page.200.txt output local",
    hadoop     : "jar hadoop.jar data/apache.200.txt output"
  ]

  // Expected count of _SUCCESS markers each sample leaves in its output.
  numParts = [
    logparser  : 1,
    loganalysis: 3,
    wordcount  : 5,
    hadoop     : 1
  ]
}
// Verification and publishing tasks applied to every sample sub-project.
// NOTE: the deprecated Task.leftShift operator (`task << { ... }`, removed
// in Gradle 5) has been replaced with doLast {} throughout; task names,
// ext properties, and action ordering are unchanged.
subprojects {
  configurations {
    sshAntTask // classpath for the Ant scp task used by sitePublish
    s3AntTask  // classpath for the Ant S3 upload task used by s3Upload
  }

  dependencies {
    sshAntTask 'org.apache.ant:ant-jsch:1.7.1', 'jsch:jsch:0.1.29'
    s3AntTask 'thirdparty:awstasks:0.3'
  }

  def verifyPath = "${buildDir}/verify/"
  def execPath = "${verifyPath}/${project.name}"

  // Unpack this sample's distribution tarball into the verify directory.
  task unpackDist( dependsOn: 'dist' ) {
    ext.archivePath = tasks[ 'dist' ].archivePath
  }
  unpackDist.doLast {
    ant.untar( src: archivePath, dest: verifyPath, compression: "gzip" )
  }

  task execSample( dependsOn: unpackDist ) {
    description = 'execute all samples using $hadoop.home property, disable with $execsample.skip=true'
    enabled = System.properties[ 'execsample.skip' ] != 'true'
  }
  // First action: run the sample through the local Hadoop/YARN install,
  // capturing console output for inspection.
  execSample.doLast {
    assert hadoopHome
    ant.exec( dir: execPath, executable: "${hadoopHome}/bin/yarn", output: "${verifyPath}/console.txt" ) {
      arg( line: commandLines[ project.name ] )
    }
  }
  // Second action: verify the run produced the expected number of
  // _SUCCESS markers for this sample.
  execSample.doLast {
    assert fileTree( execPath ).include( '**/_SUCCESS' ).getFiles().size() == numParts[ project.name ]
    println "${project.name} PASSED exec dist tests"
  }

  // Push the verified distribution (plus a latest.txt pointer) to S3.
  // Credentials come from -Dpublish.aws.accessId / -Dpublish.aws.secretKey.
  task s3Upload( dependsOn: execSample ) {
    ext.awsAccessId = System.properties[ 'publish.aws.accessId' ]
    ext.awsSecretKey = System.properties[ 'publish.aws.secretKey' ]
    ext.s3Bucket = System.properties[ 'publish.bucket' ]
    ext.remotePath = "samples/${cascadingVersionMajor}/${project.name}/"
  }
  s3Upload.doLast {
    ant.taskdef( name: 's3Upload', classname: 'dak.ant.taskdefs.S3Upload',
      classpath: configurations.s3AntTask.asPath )
    def currentPath = new File( buildDir, 'latest.txt' )
    currentPath.write( "http://${s3Bucket}/${remotePath}${dist.archivePath.name}" )
    ant.s3Upload( verbose: 'true', accessId: awsAccessId, secretKey: awsSecretKey,
      bucket: s3Bucket, prefix: remotePath, publicRead: 'true' ) {
      fileset( file: dist.archivePath )
      fileset( file: currentPath )
    }
    currentPath.delete()
  }

  // Copy the latest.txt pointer to the download site over scp.
  task sitePublish( dependsOn: s3Upload ) {
    doLast {
      def publishBucket = System.properties[ 'publish.bucket' ]
      def publishDownloadPath = System.properties[ 'publish.download.path' ]
      def publishPort = !System.properties[ 'publish.port' ] ? '22' : System.properties[ 'publish.port' ]
      def publishKeyFile = System.properties[ 'publish.keyfile' ]
      def currentPath = new File( buildDir, 'latest.txt' )
      currentPath.write( "http://${publishBucket}/samples/${cascadingVersionMajor}/${project.name}/${dist.archivePath.name}" )
      ant.taskdef( name: 'scp', classname: 'org.apache.tools.ant.taskdefs.optional.ssh.Scp',
        classpath: configurations.sshAntTask.asPath )
      def remoteToFile = "${publishDownloadPath}/samples/${cascadingVersionMajor}/${project.name}/latest.txt"
      ant.scp( file: currentPath, remoteToFile: remoteToFile,
        keyfile: publishKeyFile, passphrase: '', port: publishPort, trust: 'true' )
      currentPath.delete()
    }
  }
}
// Refresh each sample sub-project's build.gradle from the canonical
// sample.build.gradle template ('hadoop' keeps its own build script).
// Replaces the deprecated Task.leftShift (<<) operator — removed in
// Gradle 5 — with doLast {}; behavior is unchanged.
task updateBuildFile {
  doLast {
    subprojects.each { sub ->
      if( sub.name != 'hadoop' )
      {
        copy {
          from 'sample.build.gradle'
          into sub.projectDir
          rename { 'build.gradle' }
        }
      }
    }
  }
}