git-svn-id: https://svn.apache.org/repos/asf/ant/core/trunk@450611 13f79535-47bb-0310-9956-ffa450edef68
@@ -36,6 +36,9 @@ Other changes: | |||
* Extending JAR-Task for SPI. Bugzilla report 31520. | |||
* Added <tokens> resource collection for convenient creation of string | |||
resources from other resources' content. Inspired by Bugzilla 40504. | |||
Changes from Ant 1.7.0Beta1 to Ant 1.7.0Beta2 | |||
============================================= | |||
@@ -317,6 +317,8 @@ Ant's "legacy" datatypes have been modified to behave as Resource Collections: | |||
<li><a href="#sort">sort</a> - sorted resource collection</li> | |||
<li><a href="#first">first</a> - first <i>n</i> resources from a | |||
nested collection</li> | |||
<li><a href="#tokens">tokens</a> - <a href="#string">string</a> tokens | |||
gathered from a nested collection</li> | |||
<li><a href="#union">union</a> - set union of nested resource collections</li> | |||
<li><a href="#intersect">intersect</a> - set intersection | |||
of nested resource collections</li> | |||
@@ -727,6 +729,36 @@ larger collection.</p> | |||
<p>A single resource collection is required.</p> | |||
</blockquote> | |||
<h4><a name="tokens">tokens</a></h4>
<p>Includes the <a href="#string">string</a> tokens gathered from a nested
resource collection. Uses the same tokenizers supported by the
<a href="filterchain.html#tokenfilter">TokenFilter</a>. Hint: imaginative
use of this resource collection can implement equivalents for such Unix
functions as <code>sort</code>, <code>grep -c</code>, <code>wc</code> and
<code>wc -l</code>.</p>
<blockquote> | |||
<table border="1" cellpadding="2" cellspacing="0"> | |||
<tr> | |||
<td valign="top"><b>Attribute</b></td> | |||
<td valign="top"><b>Description</b></td> | |||
<td align="center" valign="top"><b>Required</b></td> | |||
</tr> | |||
<tr> | |||
<td valign="top">encoding</td> | |||
<td valign="top">The encoding of the nested resources</td> | |||
<td valign="top" align="center">No, default is platform default</td> | |||
</tr> | |||
<tr> | |||
<td valign="top">cache</td> | |||
<td valign="top">Whether to cache results; disabling | |||
may seriously impact performance</td> | |||
<td valign="top" align="center">No, default <i>true</i></td> | |||
</tr> | |||
</table> | |||
<h4>Parameters specified as nested elements</h4> | |||
<p>A single resource collection is required.</p> | |||
</blockquote> | |||
<h4><a name="setlogic">Set operations</a></h4> | |||
<blockquote> | |||
<p>The following resource collections implement set operations:</p> | |||
@@ -56,6 +56,7 @@ sort=org.apache.tools.ant.types.resources.Sort | |||
resources=org.apache.tools.ant.types.resources.Resources | |||
first=org.apache.tools.ant.types.resources.First | |||
tarfileset=org.apache.tools.ant.types.TarFileSet | |||
tokens=org.apache.tools.ant.types.resources.Tokens | |||
#Resources (single-element ResourceCollections): | |||
resource=org.apache.tools.ant.types.Resource | |||
@@ -68,3 +69,8 @@ tarentry=org.apache.tools.ant.types.resources.TarResource | |||
gzipresource=org.apache.tools.ant.types.resources.GZipResource | |||
bzip2resource=org.apache.tools.ant.types.resources.BZip2Resource | |||
javaresource=org.apache.tools.ant.types.resources.JavaResource | |||
#tokenizer implementations
linetokenizer=org.apache.tools.ant.util.LineTokenizer
stringtokenizer=org.apache.tools.ant.util.StringTokenizer
filetokenizer=org.apache.tools.ant.util.FileTokenizer
@@ -0,0 +1,129 @@ | |||
/* | |||
* Licensed to the Apache Software Foundation (ASF) under one or more | |||
* contributor license agreements. See the NOTICE file distributed with | |||
* this work for additional information regarding copyright ownership. | |||
* The ASF licenses this file to You under the Apache License, Version 2.0 | |||
* (the "License"); you may not use this file except in compliance with | |||
* the License. You may obtain a copy of the License at | |||
* | |||
* http://www.apache.org/licenses/LICENSE-2.0 | |||
* | |||
* Unless required by applicable law or agreed to in writing, software | |||
* distributed under the License is distributed on an "AS IS" BASIS, | |||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
* See the License for the specific language governing permissions and | |||
* limitations under the License. | |||
* | |||
*/ | |||
package org.apache.tools.ant.types.resources; | |||
import java.io.InputStreamReader; | |||
import java.io.IOException; | |||
import java.io.UnsupportedEncodingException; | |||
import java.util.Stack; | |||
import java.util.ArrayList; | |||
import java.util.Collection; | |||
import java.util.Collections; | |||
import org.apache.tools.ant.Project; | |||
import org.apache.tools.ant.BuildException; | |||
import org.apache.tools.ant.types.DataType; | |||
import org.apache.tools.ant.types.ResourceCollection; | |||
import org.apache.tools.ant.types.resources.StringResource; | |||
import org.apache.tools.ant.util.ConcatResourceInputStream; | |||
import org.apache.tools.ant.util.LineTokenizer; | |||
import org.apache.tools.ant.util.Tokenizer; | |||
/** | |||
* ResourceCollection consisting of StringResources gathered from tokenizing | |||
* another ResourceCollection with a Tokenizer implementation. | |||
* @since Ant 1.7 | |||
*/ | |||
public class Tokens extends BaseResourceCollectionWrapper { | |||
private Tokenizer tokenizer; | |||
private String encoding; | |||
/** | |||
* Sort the contained elements. | |||
* @return a Collection of Resources. | |||
*/ | |||
protected synchronized Collection getCollection() { | |||
ResourceCollection rc = getResourceCollection(); | |||
if (rc.size() == 0) { | |||
return Collections.EMPTY_SET; | |||
} | |||
if (tokenizer == null) { | |||
tokenizer = new LineTokenizer(); | |||
} | |||
ConcatResourceInputStream cat = new ConcatResourceInputStream(rc); | |||
cat.setManagingComponent(this); | |||
InputStreamReader rdr = null; | |||
if (encoding == null) { | |||
rdr = new InputStreamReader(cat); | |||
} else { | |||
try { | |||
rdr = new InputStreamReader(cat, encoding); | |||
} catch (UnsupportedEncodingException e) { | |||
throw new BuildException(e); | |||
} | |||
} | |||
ArrayList result = new ArrayList(); | |||
try { | |||
for (String s = tokenizer.getToken(rdr); s != null; s = tokenizer.getToken(rdr)) { | |||
result.add(new StringResource(s)); | |||
} | |||
} catch (IOException e) { | |||
throw new BuildException("Error reading tokens", e); | |||
} | |||
return result; | |||
} | |||
/** | |||
* Set the encoding used to create the tokens. | |||
* @param encoding the encoding to use. | |||
*/ | |||
public synchronized void setEncoding(String encoding) { | |||
this.encoding = encoding; | |||
} | |||
/** | |||
* Add the nested Tokenizer to this Tokens ResourceCollection. | |||
* A LineTokenizer will be used by default. | |||
* @param tokenizer the tokenizer to add. | |||
*/ | |||
public synchronized void add(Tokenizer tokenizer) { | |||
if (isReference()) { | |||
throw noChildrenAllowed(); | |||
} | |||
if (this.tokenizer != null) { | |||
throw new BuildException("Only one nested tokenizer allowed."); | |||
} | |||
this.tokenizer = tokenizer; | |||
} | |||
/** | |||
* Overrides the BaseResourceCollectionContainer version | |||
* to check the nested Tokenizer. | |||
* @param stk the stack of data types to use (recursively). | |||
* @param p the project to use to dereference the references. | |||
* @throws BuildException on error. | |||
*/ | |||
protected synchronized void dieOnCircularReference(Stack stk, Project p) | |||
throws BuildException { | |||
if (isChecked()) { | |||
return; | |||
} | |||
if (isReference()) { | |||
super.dieOnCircularReference(stk, p); | |||
} else { | |||
if (tokenizer instanceof DataType) { | |||
stk.push(tokenizer); | |||
invokeCircularReferenceCheck((DataType) tokenizer, stk, p); | |||
} | |||
setChecked(true); | |||
} | |||
} | |||
} |
@@ -0,0 +1,111 @@ | |||
<project name="test-tokens" default="antunit"
         xmlns:au="antlib:org.apache.ant.antunit">

  <!-- Platform line separator, used to assemble multi-line string inputs. -->
  <property name="eol" value="${line.separator}" />

  <!-- Run all test* targets of this file via AntUnit. -->
  <target name="antunit">
    <au:antunit>
      <au:plainlistener />
      <file file="${ant.file}" />
    </au:antunit>
  </target>

  <!-- Default tokenizer is the line tokenizer: one token per line. -->
  <target name="testLines">
    <au:assertTrue>
      <resourcecount count="0">
        <difference>
          <tokens>
            <string value="foo${eol}bar${eol}baz" />
          </tokens>
          <resources>
            <string value="foo" />
            <string value="bar" />
            <string value="baz" />
          </resources>
        </difference>
      </resourcecount>
    </au:assertTrue>
  </target>

  <!-- Explicit <linetokenizer> must behave exactly like the default. -->
  <target name="testExplicitLines">
    <au:assertTrue>
      <resourcecount count="0">
        <difference>
          <tokens>
            <string value="foo${eol}bar${eol}baz" />
            <linetokenizer />
          </tokens>
          <resources>
            <string value="foo" />
            <string value="bar" />
            <string value="baz" />
          </resources>
        </difference>
      </resourcecount>
    </au:assertTrue>
  </target>

  <!-- <filetokenizer> treats the whole concatenated input as one token. -->
  <target name="testFileTokenizer">
    <au:assertTrue>
      <resourcecount count="1">
        <tokens>
          <resources>
            <string value="foo${eol}bar${eol}baz" />
            <file file="${ant.file}" />
          </resources>
          <filetokenizer />
        </tokens>
      </resourcecount>
    </au:assertTrue>
  </target>

  <!-- <stringtokenizer> splits on whitespace, dropping empty tokens. -->
  <target name="testStringTokenizer">
    <au:assertTrue>
      <resourcecount count="0">
        <difference>
          <tokens>
            <string value="foo bar baz " />
            <stringtokenizer />
          </tokens>
          <resources>
            <string value="foo" />
            <string value="bar" />
            <string value="baz" />
          </resources>
        </difference>
      </resourcecount>
    </au:assertTrue>
  </target>

  <!-- The encoding attribute decodes the nested resources before tokenizing. -->
  <target name="testEncoding">
    <au:assertTrue>
      <resourcecount count="0">
        <difference>
          <tokens encoding="utf-16">
            <file file="utf-16.in" />
          </tokens>
          <resources>
            <string value="foo" />
            <string value="bar" />
            <string value="baz" />
          </resources>
        </difference>
      </resourcecount>
    </au:assertTrue>
  </target>

  <!-- Tokens compose with other resource collections, e.g. <sort>:
       a poor man's Unix sort(1). -->
  <target name="testSort">
    <pathconvert property="sorted" pathsep="${eol}">
      <sort>
        <tokens>
          <string value="foo bar etc baz" />
          <stringtokenizer />
        </tokens>
      </sort>
    </pathconvert>
    <au:assertTrue>
      <equals arg1="bar${eol}baz${eol}etc${eol}foo" arg2="${sorted}" />
    </au:assertTrue>
  </target>

</project>