Commit 0995fd75 authored by Eugene Zhukov

Imported Upstream version 4.0.1

parent 2da8a562
#########################
#### GENERAL or ROOT ####
#########################
# Transifex
.transifexrc
# Maven
target/
# Eclipse
/bin/
.classpath
.project
.settings/
......
[main]
host = https://www.transifex.com
minimum_perc = 50
[epubcheck.ErrorsWarnings]
file_filter = src/main/resources/com/adobe/epubcheck/messages/MessageBundle_<lang>.properties
source_file = src/main/resources/com/adobe/epubcheck/messages/MessageBundle.properties
source_lang = en
type = UNICODEPROPERTIES
[epubcheck.CommandlineMessages]
file_filter = src/main/resources/com/adobe/epubcheck/util/messages_<lang>.properties
source_file = src/main/resources/com/adobe/epubcheck/util/messages.properties
source_lang = en
type = UNICODEPROPERTIES
[epubcheck.CssParser]
file_filter = src/main/resources/org/idpf/epubcheck/util/css/messages_<lang>.properties
source_file = src/main/resources/org/idpf/epubcheck/util/css/messages.properties
source_lang = en
type = UNICODEPROPERTIES
[epubcheck.jing-xsd-validation]
file_filter = src/main/resources/com/thaiopensource/datatype/xsd/resources/Messages_<lang>.properties
source_file = src/main/resources/com/thaiopensource/datatype/xsd/resources/Messages.properties
source_lang = en
type = PROPERTIES
[epubcheck.jing-relaxng-validation]
file_filter = src/main/resources/com/thaiopensource/relaxng/pattern/resources/Messages_<lang>.properties
source_file = src/main/resources/com/thaiopensource/relaxng/pattern/resources/Messages.properties
source_lang = en
type = PROPERTIES
[epubcheck.jing-schematron-validation]
file_filter = src/main/resources/com/thaiopensource/validate/schematron/resources/Messages_<lang>.properties
source_file = src/main/resources/com/thaiopensource/validate/schematron/resources/Messages.properties
source_lang = en
type = PROPERTIES
......@@ -11,12 +11,12 @@ as a standalone command-line tool or used as a Java library.
Check the [release page](https://github.com/IDPF/epubcheck/releases) to get the latest distribution.
[EpubCheck 4.0](https://github.com/IDPF/epubcheck/releases/tag/v4.0) is the latest recommended version to validate both EPUB 2 and 3 files.
[EpubCheck 4.0](https://github.com/IDPF/epubcheck/releases/tag/v4.0.0) is the latest recommended version to validate both EPUB 2 and 3 files.
## Documentation
Documentation on how to **use** or how to **contribute** is available on the [EpubCheck wiki](https://github.com/IDPF/epubcheck/wiki).
Documentation on how to **use** EpubCheck, to **contribute** to the project or to **translate** messages is available on the [EpubCheck wiki](https://github.com/IDPF/epubcheck/wiki).
Technical discussions are hosted on the [EpubCheck Google Group](https://groups.google.com/forum/#!forum/epubcheck).
......@@ -51,6 +51,7 @@ EpubCheck is made available under the terms of the [New BSD License](http://open
----
## Building EpubCheck
[![Build Status](https://travis-ci.org/IDPF/epubcheck.svg?branch=master)](https://travis-ci.org/IDPF/epubcheck/)
To build epubcheck from the sources, you need the Java Development Kit (JDK) 1.7 or above and [Apache Maven](http://maven.apache.org/) 2.3 or above installed.
On Windows, you should build inside a Git Bash shell (see the help pages at http://github.com).
......@@ -64,27 +65,3 @@ Build and run tests:
```
$ mvn install
```
This copies the `*.jar` files and packages into the `target/` folder.
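
Once built, the checker can be run directly from the command line. The jar name below is an assumption (it depends on the version that was built), so adjust it to whatever actually lands in `target/`:

```
# Run the freshly built checker against an EPUB file
# (jar name assumed; check target/ for the actual artifact name)
$ java -jar target/epubcheck-4.0.1.jar path/to/book.epub
```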
## Translating EpubCheck
EpubCheck 4.0 now comes with *i18n* support. Feel free to translate the Java message files into your language and then open a Pull Request here.
Translations so far:
* `en` - English
* `ja` - Japanese
* `de` - German (Tobias Fischer, PR #462)
* `es` - Spanish (Emiliano Molina, PR #470, #482)
* `fr` - French (Vincent Gros, PR #472)
* `it` - Italian (Alberto Pettarin, PR #496)
To translate, simply copy the original (English) message file (`MessageBundle.properties` or `messages.properties`) located in
* [src/main/resources/com/adobe/epubcheck/messages/](src/main/resources/com/adobe/epubcheck/messages/)
* [src/main/resources/com/adobe/epubcheck/util](src/main/resources/com/adobe/epubcheck/util)
* [src/main/resources/com/thaiopensource/datatype/xsd/resources](src/main/resources/com/thaiopensource/datatype/xsd/resources)
* [src/main/resources/com/thaiopensource/relaxng/pattern/resources](src/main/resources/com/thaiopensource/relaxng/pattern/resources)
* [src/main/resources/org/idpf/epubcheck/util/css](src/main/resources/org/idpf/epubcheck/util/css)
to `MessageBundle_XX.properties` or `messages_XX.properties` in its respective folder and start translating.
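
For example, bootstrapping a translation for a hypothetical language code `XX` of the main message bundle is just a copy of the English source file (the `cp` call below merely illustrates the step described above):

```
# Copy the English source bundle to a new language-specific bundle
$ cp src/main/resources/com/adobe/epubcheck/messages/MessageBundle.properties \
     src/main/resources/com/adobe/epubcheck/messages/MessageBundle_XX.properties
```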
Send us a Pull Request when you have finished the translation and think it's ready to merge.
......@@ -11,7 +11,7 @@
<groupId>org.idpf</groupId>
<artifactId>epubcheck</artifactId>
<version>4.0.0</version>
<version>4.0.1</version>
<packaging>jar</packaging>
......@@ -140,7 +140,7 @@
<connection>scm:git:ssh://git@github.com:IDPF/epubcheck.git</connection>
<developerConnection>scm:git:ssh://git@github.com:IDPF/epubcheckgit</developerConnection>
<url>https://github.com/IDPF/epubcheck</url>
<tag>v4.0.0</tag>
<tag>v4.0.1</tag>
</scm>
<issueManagement>
<system>Github</system>
......@@ -233,6 +233,8 @@
<directory>src/main/resources</directory>
<excludes>
<exclude>com/adobe/epubcheck/schema/30/*.sch</exclude>
<exclude>com/thaiopensource/relaxng/pattern/resources/Messages.properties</exclude>
<exclude>com/thaiopensource/datatype/xsd/resources/Messages.properties</exclude>
</excludes>
</resource>
<resource>
......@@ -418,6 +420,13 @@
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:none</additionalparam>
</configuration>
</plugin>
<plugin>
<!-- required to work around issues with git 1.8.5 and release-plugin -->
<!-- https://jira.codehaus.org/browse/SCM-738 -->
......
#!/bin/bash
# bash script to update and normalize
# pulled transifex properties files
#
# Author: Tobias Fischer (https://github.com/tofi86)
# Project: IDPF/EpubCheck (https://github.com/IDPF/epubcheck)
#
# Date: 2015-10-09
# License: MIT License
#
param1=$1
function escapeISO88591() {
file=$1
echo "- Escaping ISO-8859-1 encodings with Unicode escapes"
native2ascii -encoding ISO-8859-1 ${file} ${file}
}
function removeJavaEscapes() {
file=$1
# replace \\ -> \, \` -> `, \= -> =, \: -> :, \! -> !
sed -E -i -- 's/\\([\\`=:!])/\1/g' ${file}
# make unicode escapes \u00fc uppercase \u00FC
perl -i -pe 's/\\u([0-9a-f]{4})/\\u\U\1/g' ${file}
# replace newlines in help_text
sed -E -i -- '/^help_text/s/((\\n)+)/\1\\\'$'\n /g' ${file}
sed -E -i -- 's/^( )([[:space:]]+)/\1\\\2/g' ${file}
# remove temp file
rm ${file}-- 2> /dev/null
}
function processFile() {
file=$1
echo ""
echo "Processing file '${file}'"
file ${file} | grep 'ISO-8859' > /dev/null
if [ $? -eq 0 ]; then
escapeISO88591 ${file}
fi
removeJavaEscapes ${file}
}
# Check if this is running from repo root dir near the .tx/ folder
if [ ! -d .tx/ ] ; then
echo "FATAL: You need to run this script from the repository's root directory!"
echo "e.g. ./src/build/transifex-pull.sh --all"
exit 1
fi
# Check for Transifex Commandline Client
if [ `which tx >/dev/null ; echo $?` -eq 1 ] ; then
echo "FATAL: You need to install the Transifex Commandline Client first in order to run this script!"
echo "Instructions: http://docs.transifex.com/client/setup/"
exit 1
fi
# Show help if no language parameter is passed to the script or --help
if [[ -z ${param1} || ${param1} == "--help" ]] ; then
echo "usage: transifex-pull.sh [--all | <2-digit-country-code>]"
echo "examples:"
echo " transifex-pull.sh --all"
echo " transifex-pull.sh de"
# Pull ALL translations
elif [ ${param1} == "--all" ] ; then
minimum_percent_translated=$(awk -F "=" '/minimum_perc/ {print $2}' .tx/config)
echo "Pulling ALL epubcheck translations (>${minimum_percent_translated}% done) from Transifex..."
echo ""
tx pull -f | tee /dev/stderr | grep "> [a-z][a-z]: " | awk '{print $3}' | while read f; do processFile ${f}; done
# Pull translations for a 2-digit-language-code
elif [ ${#param1} -eq 2 ] ; then
echo "Pulling epubcheck translation '${param1}' from Transifex..."
echo ""
tx pull -f -l ${param1} | tee /dev/stderr | grep "${param1}: " | awk '{print $3}' | while read f; do processFile ${f}; done
else
echo "FATAL: Couldn't recognize language code '${param1}'. Exit."
exit 1
fi
......@@ -105,7 +105,12 @@ public class BitmapChecker implements ContentChecker
tempFile = getImageFile(ocf, imgFileName);
if (tempFile != null)
{
return new ImageHeuristics(0, 0, tempFile.length());
long tempFileLength = tempFile.length();
if (ocf.getClass() == OCFZipPackage.class)
{
tempFile.delete();
}
return new ImageHeuristics(0, 0, tempFileLength);
}
return null;
}
......@@ -129,8 +134,10 @@ public class BitmapChecker implements ContentChecker
}
if (formatFromSuffix != null && formatFromSuffix.equals(formatFromInputStream)) break;
}
if (formatFromSuffix != null && formatFromSuffix.equals(formatFromInputStream)) {
if (formatFromSuffix != null && formatFromSuffix.equals(formatFromInputStream)) {
// file format and file extension matches; read image file
try {
......@@ -155,18 +162,32 @@ public class BitmapChecker implements ContentChecker
report.message(MessageId.PKG_021, EPUBLocation.create(imgFileName));
return null;
}
finally
{
if (ocf.getClass() == OCFZipPackage.class)
{
tempFile.delete();
}
}
} else if (formatFromSuffix != null) {
// file format and file extension differs
report.message(MessageId.PKG_022, EPUBLocation.create(imgFileName), formatFromInputStream, suffix);
return null;
} else {
// file format could not be determined
throw new IOException("Not a known image file: " + imgFileName);
}
} else
{
if (ocf.getClass() == OCFZipPackage.class)
{
tempFile.delete();
}
if (formatFromSuffix != null) {
// file format and file extension differs
report.message(MessageId.PKG_022, EPUBLocation.create(imgFileName), formatFromInputStream, suffix);
return null;
} else {
// file format could not be determined
throw new IOException("Not a known image file: " + imgFileName);
}
}
}
private File getImageFile(OCFPackage ocf, String imgFileName) throws IOException
......@@ -211,7 +232,6 @@ public class BitmapChecker implements ContentChecker
String prefix = "img";
file = File.createTempFile(prefix, suffix);
file.deleteOnExit();
os = new FileOutputStream(file);
is = ocf.getInputStream(imgFileName);
......
......@@ -82,10 +82,11 @@ public class EpubExtLinksCheck implements DocumentValidator
}
ZipEntry imgentry = epack.getZip().getEntry(imageFile);
if (imgentry == null)
if (imgentry == null && "altimg".equalsIgnoreCase(type))
{
MessageId id = "img".compareToIgnoreCase(type) == 0 ? MessageId.RSC_001 : MessageId.RSC_018;
report.message(id, EPUBLocation.create(fileToParse, value.getLine(), value.getColumn(), value.getContext()), value.getValue());
// missing "img" already reported in XRefChecker
// MessageId id = "img".compareToIgnoreCase(type) == 0 ? MessageId.RSC_001 : MessageId.RSC_018;
report.message(MessageId.RSC_018, EPUBLocation.create(fileToParse, value.getLine(), value.getColumn(), value.getContext()), value.getValue());
}
}
}
......
......@@ -120,7 +120,7 @@ public class EpubHTML5StructureCheck implements DocumentValidator
|| fileExtension.compareToIgnoreCase("xhtml") == 0))
{
// Note: extension is already checked in OPFChecker30 for EPUB 3
report.message(MessageId.HTM_014, EPUBLocation.create(mi.getHref()));
report.message(MessageId.HTM_014, EPUBLocation.create(epubPackage.getPackageMainPath() + "/" + mi.getHref()));
}
/***VALIDATE DOCTYPE***/
......@@ -128,11 +128,11 @@ public class EpubHTML5StructureCheck implements DocumentValidator
if ((0 != (docTypeMatches & hasHTML4)) && (epubPackage.getVersion() == EPUBVersion.VERSION_3))
{
report.message(MessageId.HTM_015, EPUBLocation.create(mi.getHref()));
report.message(MessageId.HTM_015, EPUBLocation.create(epubPackage.getPackageMainPath() + "/" + mi.getHref()));
}
else if ((0 != (docTypeMatches & hasHTML5)) && ((hasXhtml != (docTypeMatches & hasXhtml))) && (epubPackage.getVersion() == EPUBVersion.VERSION_2))
{
report.message(MessageId.HTM_016, EPUBLocation.create(mi.getHref()));
report.message(MessageId.HTM_016, EPUBLocation.create(epubPackage.getPackageMainPath() + "/" + mi.getHref()));
}
parser.parseDoc(fileToParse, sh);
......
......@@ -161,8 +161,8 @@ public class EpubLangCheck implements DocumentValidator
{"Gujarati", "gu"},
{"Haitian Creole", "ht"},
{"Hausa", "ha"},
{"Hawaiian", " "},
{"Hebrew", "he, iw"},
{"Hawaiian", "haw"},
{"Hebrew", "he"},
{"Hindi", "hi"},
{"Hungarian", "hu"},
{"Ibibio", " "},
......
......@@ -89,10 +89,10 @@ public class EpubSVGCheck implements DocumentValidator
}
}
}
}
if (itemIsFixedFormat)
{
checkSvgDoc(fileToParse);
if (itemIsFixedFormat)
{
checkSvgDoc(fileToParse);
}
}
}
}
......
......@@ -63,11 +63,15 @@ class XmlDocParser
}
catch (IOException e)
{
report.message(MessageId.PKG_008, EPUBLocation.create(fileEntry), fileEntry);
// Ignore, should have been reported earlier
// report.message(MessageId.PKG_008, EPUBLocation.create(fileEntry),
// fileEntry);
}
catch (SAXException e)
{
report.message(MessageId.RSC_005, EPUBLocation.create(fileEntry), e.getMessage());
// Ignore, should have been reported earlier
// report.message(MessageId.RSC_005, EPUBLocation.create(fileEntry),
// e.getMessage());
doc = null;
}
finally
......
......@@ -70,11 +70,15 @@ public class XMLContentDocParser
}
catch (IOException e)
{
report.message(MessageId.PKG_008, EPUBLocation.create(fileEntry), fileEntry);
// Ignore, should have been reported earlier
// report.message(MessageId.PKG_008, EPUBLocation.create(fileEntry),
// fileEntry);
}
catch (SAXException e)
{
report.message(MessageId.RSC_005, EPUBLocation.create(fileEntry), e.getMessage());
// Ignore, should have been reported earlier
// report.message(MessageId.RSC_005, EPUBLocation.create(fileEntry),
// e.getMessage());
}
catch (ParserConfigurationException e)
{
......
......@@ -20,7 +20,7 @@
*
*/
package dict;
package com.adobe.epubcheck.dict;
import com.adobe.epubcheck.api.EPUBLocation;
import com.adobe.epubcheck.api.Report;
......
......@@ -20,7 +20,7 @@
*
*/
package dict;
package com.adobe.epubcheck.dict;
import com.adobe.epubcheck.opf.ContentCheckerFactory;
import com.adobe.epubcheck.opf.DocumentValidatorFactory;
......
package dict;
package com.adobe.epubcheck.dict;
import com.adobe.epubcheck.opf.ValidationContext;
import com.adobe.epubcheck.opf.XRefChecker.Type;
......
......@@ -100,7 +100,7 @@ public class MessageDictionary
map.put(MessageId.CSS_004, Severity.ERROR);
map.put(MessageId.CSS_005, Severity.ERROR);
map.put(MessageId.CSS_006, Severity.WARNING);
map.put(MessageId.CSS_007, Severity.WARNING);
map.put(MessageId.CSS_007, Severity.INFO);
map.put(MessageId.CSS_008, Severity.ERROR);
map.put(MessageId.CSS_009, Severity.USAGE);
map.put(MessageId.CSS_010, Severity.ERROR);
......@@ -136,11 +136,11 @@ public class MessageDictionary
map.put(MessageId.HTM_013, Severity.USAGE);
map.put(MessageId.HTM_014, Severity.WARNING);
map.put(MessageId.HTM_014a, Severity.WARNING);
map.put(MessageId.HTM_015, Severity.WARNING);
map.put(MessageId.HTM_016, Severity.WARNING);
map.put(MessageId.HTM_015, Severity.SUPPRESSED);
map.put(MessageId.HTM_016, Severity.SUPPRESSED);
map.put(MessageId.HTM_017, Severity.ERROR);
map.put(MessageId.HTM_018, Severity.ERROR);
map.put(MessageId.HTM_019, Severity.ERROR);
map.put(MessageId.HTM_018, Severity.USAGE);
map.put(MessageId.HTM_019, Severity.USAGE);
map.put(MessageId.HTM_020, Severity.USAGE);
map.put(MessageId.HTM_021, Severity.USAGE);
map.put(MessageId.HTM_022, Severity.USAGE);
......
......@@ -33,6 +33,7 @@ import com.adobe.epubcheck.api.EPUBProfile;
import com.adobe.epubcheck.api.FeatureReport.Feature;
import com.adobe.epubcheck.bitmap.BitmapCheckerFactory;
import com.adobe.epubcheck.css.CSSCheckerFactory;
import com.adobe.epubcheck.dict.SearchKeyMapCheckerFactory;
import com.adobe.epubcheck.dtbook.DTBookCheckerFactory;
import com.adobe.epubcheck.messages.MessageId;
import com.adobe.epubcheck.opf.MetadataSet.Metadata;
......@@ -48,8 +49,6 @@ import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.io.Files;
import dict.SearchKeyMapCheckerFactory;
public class OPFChecker30 extends OPFChecker implements DocumentValidator
{
......
......@@ -20,6 +20,7 @@ import com.adobe.epubcheck.util.FeatureEnum;
import com.adobe.epubcheck.util.PathUtil;
import com.adobe.epubcheck.vocab.AggregateVocab;
import com.adobe.epubcheck.vocab.AltStylesheetVocab;
import com.adobe.epubcheck.vocab.ComicsVocab;
import com.adobe.epubcheck.vocab.DataNavVocab;
import com.adobe.epubcheck.vocab.DictVocab;
import com.adobe.epubcheck.vocab.EnumVocab;
......@@ -37,6 +38,7 @@ import com.adobe.epubcheck.xml.XMLAttribute;
import com.adobe.epubcheck.xml.XMLElement;
import com.adobe.epubcheck.xml.XMLParser;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
......@@ -49,7 +51,7 @@ public class OPSHandler30 extends OPSHandler
private static Map<String, Vocab> RESERVED_VOCABS = ImmutableMap.<String, Vocab> of("",
AggregateVocab.of(StructureVocab.VOCAB, StagingEdupubVocab.VOCAB, DataNavVocab.VOCAB,
DictVocab.VOCAB, IndexVocab.VOCAB));
DictVocab.VOCAB, IndexVocab.VOCAB, ComicsVocab.VOCAB));
private static Map<String, Vocab> ALTCSS_VOCABS = ImmutableMap.<String, Vocab> of("",
AltStylesheetVocab.VOCAB);
private static Map<String, Vocab> KNOWN_VOCAB_URIS = ImmutableMap.of();
......@@ -75,6 +77,7 @@ public class OPSHandler30 extends OPSHandler
protected boolean inBody = false;
protected boolean inRegionBasedNav = false;
protected boolean hasAltorAnnotation = false;
protected boolean hasTitle = false;
static protected final String[] scriptEventsStrings = { "onafterprint", "onbeforeprint",
"onbeforeunload", "onerror", "onhaschange", "onload", "onmessage", "onoffline", "onpagehide",
......@@ -278,6 +281,10 @@ public class OPSHandler30 extends OPSHandler
{
hasAltorAnnotation = true;
}
else if ("http://www.w3.org/2000/svg".equals(e.getNamespace()) && name.equals("title"))
{
hasTitle = true;
}
processInlineScripts(e);
......@@ -342,14 +349,10 @@ public class OPSHandler30 extends OPSHandler
{
anchorNeedsText = false;
}
if (inSvg)
if (inSvg || context.mimeType.equals("image/svg+xml"))
{
String titleAttribute = e.getAttributeNS(EpubConstants.XLinkNamespaceUri, "title");
if (titleAttribute == null)
{
report.message(MessageId.ACC_011, EPUBLocation.create(path, parser.getLineNumber(),
parser.getColumnNumber(), e.getName()));
}
hasTitle = Strings
.emptyToNull(e.getAttributeNS(EpubConstants.XLinkNamespaceUri, "title")) != null;
}
}
......@@ -458,8 +461,11 @@ public class OPSHandler30 extends OPSHandler
{
refType = XRefChecker.Type.GENERIC;
}
xrefChecker.get().registerReference(path, parser.getLineNumber(), parser.getColumnNumber(),
src, refType);
if (!"img".equals(name)) // img already registered in super class
{
xrefChecker.get().registerReference(path, parser.getLineNumber(), parser.getColumnNumber(),
src, refType);
}
srcMimeType = xrefChecker.get().getMimeType(src);
}
......@@ -653,6 +659,11 @@ public class OPSHandler30 extends OPSHandler
EPUBLocation.create(path, parser.getLineNumber(), parser.getColumnNumber(), "a"));
anchorNeedsText = false;
}
if ((inSvg || context.mimeType.equals("image/svg+xml")) && !hasTitle)
{
report.message(MessageId.ACC_011, EPUBLocation.create(path, parser.getLineNumber(),
parser.getColumnNumber(), e.getName()));
}
}
else if (name.equals("math"))
{
......
......@@ -3,13 +3,23 @@ package com.adobe.epubcheck.vocab;
import java.util.List;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
public class AggregateVocab implements Vocab
{
private final List<Vocab> vocabs;
private final String uri;
/**
* Returns a vocabulary composed of the union of the vocabularies given as
* parameter. The given vocabularies must have the same base URI.
*
* @param vocabs
* the vocabularies to aggregate.
* @return the aggregated vocabulary.
*/
public static Vocab of(Vocab... vocabs)
{
return new AggregateVocab(new ImmutableList.Builder<Vocab>().add(vocabs).build());
......@@ -17,6 +27,14 @@ public class AggregateVocab implements Vocab
private AggregateVocab(List<Vocab> vocabs)
{
this.uri = (!vocabs.isEmpty()) ? Strings.nullToEmpty(vocabs.get(0).getURI()) : "";
for (Vocab vocab : vocabs)
{
if (!uri.equals(Strings.nullToEmpty(vocab.getURI())))
{
throw new IllegalArgumentException("Aggregated vocabs must share the same base URI");
}
}
this.vocabs = vocabs;
}
......@@ -31,4 +49,10 @@ public class AggregateVocab implements Vocab
return Optional.absent();
}
@Override
public String getURI()
{
return uri;
}
}
package com.adobe.epubcheck.vocab;
public class ComicsVocab
{
public static final String URI = "http://www.idpf.org/epub/vocab/structure/#";
public static final EnumVocab<EPUB_TYPES> VOCAB = new EnumVocab<EPUB_TYPES>(EPUB_TYPES.class, URI);
public static enum EPUB_TYPES
{
BALLOON,
PANEL,
PANEL_GROUP,
TEXT_AREA,
SOUND_AREA
}
}
......@@ -6,6 +6,7 @@ import java.util.Map;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;