implement lightweight, minimal JSON/YAML on tiny datastructures

Jörg Prante 2021-10-12 11:41:30 +02:00
parent 44b7ae7de2
commit f9adea4e18
83 changed files with 2744 additions and 1142 deletions

View file

@@ -8,7 +8,7 @@ jmhReport {
}
dependencies {
implementation project(':datastructures-json')
implementation project(':datastructures-json-tiny')
implementation project(':datastructures-json-dsl')
implementation project(':datastructures-json-flat')
implementation project(':datastructures-json-iterator')

View file

@@ -15,10 +15,10 @@ import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Timeout;
import org.openjdk.jmh.annotations.Warmup;
import org.xbib.datastructures.json.EmptyJsonListener;
import org.xbib.datastructures.json.StandardJsonListener;
import org.xbib.datastructures.json.StringParser;
import org.xbib.datastructures.json.TinyJsonListener;
import org.xbib.datastructures.json.tiny.EmptyJsonListener;
import org.xbib.datastructures.json.tiny.StandardJsonListener;
import org.xbib.datastructures.json.tiny.StringParser;
import org.xbib.datastructures.json.tiny.TinyJsonListener;
import org.xbib.datastructures.json.flat.Json;
import org.xbib.datastructures.json.noggit.ObjectBuilder;
import org.xbib.datastructures.json.simple.JSONParser;

View file

@@ -15,10 +15,10 @@ import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.annotations.Timeout;
import org.xbib.datastructures.json.EmptyJsonListener;
import org.xbib.datastructures.json.StandardJsonListener;
import org.xbib.datastructures.json.StringParser;
import org.xbib.datastructures.json.TinyJsonListener;
import org.xbib.datastructures.json.tiny.EmptyJsonListener;
import org.xbib.datastructures.json.tiny.StandardJsonListener;
import org.xbib.datastructures.json.tiny.StringParser;
import org.xbib.datastructures.json.tiny.TinyJsonListener;
import org.xbib.datastructures.json.flat.Json;
import org.xbib.datastructures.json.noggit.ObjectBuilder;
import org.xbib.datastructures.json.simple.JSONParser;

View file

@@ -15,10 +15,10 @@ import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Timeout;
import org.openjdk.jmh.annotations.Warmup;
import org.xbib.datastructures.json.EmptyJsonListener;
import org.xbib.datastructures.json.StandardJsonListener;
import org.xbib.datastructures.json.StringParser;
import org.xbib.datastructures.json.TinyJsonListener;
import org.xbib.datastructures.json.tiny.EmptyJsonListener;
import org.xbib.datastructures.json.tiny.StandardJsonListener;
import org.xbib.datastructures.json.tiny.StringParser;
import org.xbib.datastructures.json.tiny.TinyJsonListener;
import org.xbib.datastructures.json.flat.Json;
import org.xbib.datastructures.json.noggit.ObjectBuilder;
import org.xbib.datastructures.json.simple.JSONParser;

View file

@@ -0,0 +1,3 @@
module org.xbib.datastructures.api {
exports org.xbib.datastructures.api;
}

View file

@@ -0,0 +1,28 @@
package org.xbib.datastructures.api;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
public interface Builder {
Builder beginCollection() throws IOException;
Builder endCollection() throws IOException;
Builder beginMap() throws IOException;
Builder endMap() throws IOException;
Builder buildMap(Map<String, Object> map) throws IOException;
Builder buildCollection(Collection<Object> collection) throws IOException;
Builder buildKey(CharSequence charSequence) throws IOException;
Builder buildValue(Object object) throws IOException;
Builder buildNull() throws IOException;
String build();
}
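
Below is a minimal usage sketch against the Builder interface above. How the Builder instance is obtained (for example via a DataStructure implementation's createBuilder()) is left open, and the exact output depends on that implementation; only methods declared in the interface are used.

import java.io.IOException;
import org.xbib.datastructures.api.Builder;

public class BuilderSketch {
    // intended to build something like {"name":"tiny","sizes":[1,2,3]};
    // obtaining the Builder instance is left to the caller
    static String example(Builder builder) throws IOException {
        builder.beginMap()
                .buildKey("name").buildValue("tiny")
                .buildKey("sizes")
                .beginCollection()
                .buildValue(1).buildValue(2).buildValue(3)
                .endCollection()
                .endMap();
        return builder.build();
    }
}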

View file

@@ -0,0 +1,232 @@
package org.xbib.datastructures.api;
/**
* A {@code ByteSizeUnit} represents a size at a given unit of
* granularity (a power of 1024) and provides utility methods to convert
* across units. A {@code ByteSizeUnit} does not maintain size information,
* but only helps organize and use size representations that may be
* maintained separately across various contexts.
*/
public enum ByteSizeUnit {
BYTES {
@Override
public long toBytes(long size) {
return size;
}
@Override
public long toKB(long size) {
return size / (C1 / C0);
}
@Override
public long toMB(long size) {
return size / (C2 / C0);
}
@Override
public long toGB(long size) {
return size / (C3 / C0);
}
@Override
public long toTB(long size) {
return size / (C4 / C0);
}
@Override
public long toPB(long size) {
return size / (C5 / C0);
}
},
KB {
@Override
public long toBytes(long size) {
return x(size, C1 / C0, MAX / (C1 / C0));
}
@Override
public long toKB(long size) {
return size;
}
@Override
public long toMB(long size) {
return size / (C2 / C1);
}
@Override
public long toGB(long size) {
return size / (C3 / C1);
}
@Override
public long toTB(long size) {
return size / (C4 / C1);
}
@Override
public long toPB(long size) {
return size / (C5 / C1);
}
},
MB {
@Override
public long toBytes(long size) {
return x(size, C2 / C0, MAX / (C2 / C0));
}
@Override
public long toKB(long size) {
return x(size, C2 / C1, MAX / (C2 / C1));
}
@Override
public long toMB(long size) {
return size;
}
@Override
public long toGB(long size) {
return size / (C3 / C2);
}
@Override
public long toTB(long size) {
return size / (C4 / C2);
}
@Override
public long toPB(long size) {
return size / (C5 / C2);
}
},
GB {
@Override
public long toBytes(long size) {
return x(size, C3 / C0, MAX / (C3 / C0));
}
@Override
public long toKB(long size) {
return x(size, C3 / C1, MAX / (C3 / C1));
}
@Override
public long toMB(long size) {
return x(size, C3 / C2, MAX / (C3 / C2));
}
@Override
public long toGB(long size) {
return size;
}
@Override
public long toTB(long size) {
return size / (C4 / C3);
}
@Override
public long toPB(long size) {
return size / (C5 / C3);
}
},
TB {
@Override
public long toBytes(long size) {
return x(size, C4 / C0, MAX / (C4 / C0));
}
@Override
public long toKB(long size) {
return x(size, C4 / C1, MAX / (C4 / C1));
}
@Override
public long toMB(long size) {
return x(size, C4 / C2, MAX / (C4 / C2));
}
@Override
public long toGB(long size) {
return x(size, C4 / C3, MAX / (C4 / C3));
}
@Override
public long toTB(long size) {
return size;
}
@Override
public long toPB(long size) {
return size / (C5 / C4);
}
},
PB {
@Override
public long toBytes(long size) {
return x(size, C5 / C0, MAX / (C5 / C0));
}
@Override
public long toKB(long size) {
return x(size, C5 / C1, MAX / (C5 / C1));
}
@Override
public long toMB(long size) {
return x(size, C5 / C2, MAX / (C5 / C2));
}
@Override
public long toGB(long size) {
return x(size, C5 / C3, MAX / (C5 / C3));
}
@Override
public long toTB(long size) {
return x(size, C5 / C4, MAX / (C5 / C4));
}
@Override
public long toPB(long size) {
return size;
}
};
static final long C0 = 1L;
static final long C1 = C0 * 1024L;
static final long C2 = C1 * 1024L;
static final long C3 = C2 * 1024L;
static final long C4 = C3 * 1024L;
static final long C5 = C4 * 1024L;
static final long MAX = Long.MAX_VALUE;
/**
* Scale d by m, checking for overflow.
* This has a short name to make above code more readable.
*/
static long x(long d, long m, long over) {
if (d > over) {
return Long.MAX_VALUE;
}
if (d < -over) {
return Long.MIN_VALUE;
}
return d * m;
}
public abstract long toBytes(long size);
public abstract long toKB(long size);
public abstract long toMB(long size);
public abstract long toGB(long size);
public abstract long toTB(long size);
public abstract long toPB(long size);
}
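
The constants C0..C5 are successive powers of 1024, and x() saturates at Long.MAX_VALUE instead of overflowing. A short sketch of the resulting arithmetic (values computed from the code above):

import org.xbib.datastructures.api.ByteSizeUnit;

public class ByteSizeUnitSketch {
    public static void main(String[] args) {
        // 3 GB expressed in smaller and larger binary units
        System.out.println(ByteSizeUnit.GB.toBytes(3)); // 3221225472
        System.out.println(ByteSizeUnit.GB.toMB(3));    // 3072
        System.out.println(ByteSizeUnit.MB.toGB(1536)); // 1 (integer division truncates)
        // conversions that would overflow a long saturate instead of wrapping
        System.out.println(ByteSizeUnit.PB.toBytes(Long.MAX_VALUE)); // Long.MAX_VALUE
    }
}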

View file

@@ -0,0 +1,228 @@
package org.xbib.datastructures.api;
import java.util.Locale;
/**
* A size paired with a {@link ByteSizeUnit}, with helpers for parsing
* suffixed strings (e.g. "1.5gb") and for human-readable formatting.
*/
public class ByteSizeValue {
private long size;
private ByteSizeUnit sizeUnit;
private ByteSizeValue() {
}
public ByteSizeValue(long size, ByteSizeUnit sizeUnit) {
this.size = size;
this.sizeUnit = sizeUnit;
}
/**
* Format the double value with a single decimal point, trimming a trailing '.0'.
* @param value value
* @param suffix suffix
* @return formatted decimal
*/
public static String format1Decimals(double value, String suffix) {
String p = String.valueOf(value);
int ix = p.indexOf('.') + 1;
int ex = p.indexOf('E');
char fraction = p.charAt(ix);
if (fraction == '0') {
if (ex != -1) {
return p.substring(0, ix - 1) + p.substring(ex) + suffix;
} else {
return p.substring(0, ix - 1) + suffix;
}
} else {
if (ex != -1) {
return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
} else {
return p.substring(0, ix) + fraction + suffix;
}
}
}
public static ByteSizeValue parseBytesSizeValue(String sValue) {
return parseBytesSizeValue(sValue, null);
}
public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue) {
if (sValue == null) {
return defaultValue;
}
long bytes;
try {
String lastTwoChars = sValue.substring(sValue.length() - Math.min(2, sValue.length())).toLowerCase(Locale.ROOT);
if (lastTwoChars.endsWith("k")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C1);
} else if (lastTwoChars.endsWith("kb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C1);
} else if (lastTwoChars.endsWith("m")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C2);
} else if (lastTwoChars.endsWith("mb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C2);
} else if (lastTwoChars.endsWith("g")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C3);
} else if (lastTwoChars.endsWith("gb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C3);
} else if (lastTwoChars.endsWith("t")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C4);
} else if (lastTwoChars.endsWith("tb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C4);
} else if (lastTwoChars.endsWith("p")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C5);
} else if (lastTwoChars.endsWith("pb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C5);
} else if (lastTwoChars.endsWith("b")) {
bytes = Long.parseLong(sValue.substring(0, sValue.length() - 1));
} else {
bytes = Long.parseLong(sValue);
}
} catch (NumberFormatException e) {
return defaultValue;
}
return new ByteSizeValue(bytes, ByteSizeUnit.BYTES);
}
public int bytesAsInt() throws IllegalArgumentException {
long bytes = bytes();
if (bytes > Integer.MAX_VALUE) {
throw new IllegalArgumentException("size [" + toString() + "] is bigger than max int");
}
return (int) bytes;
}
public long bytes() {
return sizeUnit.toBytes(size);
}
public long getBytes() {
return bytes();
}
public long kb() {
return sizeUnit.toKB(size);
}
public long getKb() {
return kb();
}
public long mb() {
return sizeUnit.toMB(size);
}
public long getMb() {
return mb();
}
public long gb() {
return sizeUnit.toGB(size);
}
public long getGb() {
return gb();
}
public long tb() {
return sizeUnit.toTB(size);
}
public long getTb() {
return tb();
}
public long pb() {
return sizeUnit.toPB(size);
}
public long getPb() {
return pb();
}
public double kbFrac() {
return ((double) bytes()) / ByteSizeUnit.C1;
}
public double getKbFrac() {
return kbFrac();
}
public double mbFrac() {
return ((double) bytes()) / ByteSizeUnit.C2;
}
public double getMbFrac() {
return mbFrac();
}
public double gbFrac() {
return ((double) bytes()) / ByteSizeUnit.C3;
}
public double getGbFrac() {
return gbFrac();
}
public double tbFrac() {
return ((double) bytes()) / ByteSizeUnit.C4;
}
public double getTbFrac() {
return tbFrac();
}
public double pbFrac() {
return ((double) bytes()) / ByteSizeUnit.C5;
}
public double getPbFrac() {
return pbFrac();
}
@Override
public String toString() {
long bytes = bytes();
double value = bytes;
String suffix = "b";
if (bytes >= ByteSizeUnit.C5) {
value = pbFrac();
suffix = "pb";
} else if (bytes >= ByteSizeUnit.C4) {
value = tbFrac();
suffix = "tb";
} else if (bytes >= ByteSizeUnit.C3) {
value = gbFrac();
suffix = "gb";
} else if (bytes >= ByteSizeUnit.C2) {
value = mbFrac();
suffix = "mb";
} else if (bytes >= ByteSizeUnit.C1) {
value = kbFrac();
suffix = "kb";
}
return format1Decimals(value, suffix);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ByteSizeValue sizeValue = (ByteSizeValue) o;
return size == sizeValue.size && sizeUnit == sizeValue.sizeUnit;
}
@Override
public int hashCode() {
int result = (int) (size ^ (size >>> 32));
result = 31 * result + (sizeUnit != null ? sizeUnit.hashCode() : 0);
return result;
}
}
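
A small sketch of parsing and formatting with ByteSizeValue; the suffix strings follow the cases handled in parseBytesSizeValue above, and the printed values are derived from the code as shown.

import org.xbib.datastructures.api.ByteSizeUnit;
import org.xbib.datastructures.api.ByteSizeValue;

public class ByteSizeValueSketch {
    public static void main(String[] args) {
        ByteSizeValue parsed = ByteSizeValue.parseBytesSizeValue("1.5gb");
        System.out.println(parsed.bytes());   // 1610612736
        System.out.println(parsed.getMb());   // 1536
        System.out.println(parsed);           // "1.5gb"

        ByteSizeValue twoKb = new ByteSizeValue(2, ByteSizeUnit.KB);
        System.out.println(twoKb.getBytes()); // 2048
        // unparsable input falls back to the supplied default
        System.out.println(ByteSizeValue.parseBytesSizeValue("oops", twoKb)); // "2kb"
    }
}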

View file

@@ -0,0 +1,47 @@
package org.xbib.datastructures.api;
import java.time.Instant;
import java.util.function.Consumer;
public interface DataStructure {
Parser createParser();
Generator createGenerator(Node<?> root);
Builder createBuilder();
Builder createBuilder(Consumer<String> consumer);
void setRoot(Node<?> root);
Node<?> getRoot();
Node<?> getNode(String path);
boolean set(String path, Object value);
Boolean getBoolean(String path);
Byte getByte(String path);
Short getShort(String path);
Integer getInteger(String path);
Long getLong(String path);
Float getFloat(String path);
Double getDouble(String path);
Character getCharacter(String path);
String getString(String path);
Instant getInstant(String path);
TimeValue getAsTime(String setting, TimeValue defaultValue);
ByteSizeValue getAsBytesSize(String setting, ByteSizeValue defaultValue);
}
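
A hedged sketch of reading typed values through the DataStructure interface; the dotted path syntax ("server.port" and so on) is an assumption here, since the actual path separator is defined by the concrete implementation.

import org.xbib.datastructures.api.ByteSizeValue;
import org.xbib.datastructures.api.DataStructure;
import org.xbib.datastructures.api.TimeValue;

public class DataStructureSketch {
    // the dotted path syntax is an assumption; the real separator depends on the implementation
    static void inspect(DataStructure structure) {
        Integer port = structure.getInteger("server.port");
        String host = structure.getString("server.host");
        TimeValue timeout = structure.getAsTime("server.timeout", TimeValue.timeValueSeconds(30));
        ByteSizeValue limit = structure.getAsBytesSize("server.limit",
                ByteSizeValue.parseBytesSizeValue("16mb"));
        System.out.println(host + ":" + port + " timeout=" + timeout + " limit=" + limit);
    }
}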

View file

@@ -0,0 +1,9 @@
package org.xbib.datastructures.api;
import java.io.IOException;
import java.io.Writer;
public interface Generator {
void generate(Writer writer) throws IOException;
}

View file

@@ -0,0 +1,6 @@
package org.xbib.datastructures.api;
import java.util.List;
public interface ListNode extends Node<List<Node<?>>> {
}

View file

@@ -0,0 +1,6 @@
package org.xbib.datastructures.api;
import java.util.Map;
public interface MapNode extends Node<Map<CharSequence, Node<?>>> {
}

View file

@@ -1,4 +1,4 @@
package org.xbib.datastructures.yaml;
package org.xbib.datastructures.api;
public interface Node<T> {

View file

@@ -0,0 +1,9 @@
package org.xbib.datastructures.api;
import java.io.IOException;
import java.io.Reader;
public interface Parser {
Node<?> parse(Reader reader) throws IOException;
}
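
Parser and Generator instances come from a DataStructure; the sketch below round-trips a document using only the api interfaces. Which concrete DataStructure is supplied (for example the tiny Json implementation added later in this commit) is left open.

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import org.xbib.datastructures.api.DataStructure;
import org.xbib.datastructures.api.Generator;
import org.xbib.datastructures.api.Node;
import org.xbib.datastructures.api.Parser;

public class RoundTripSketch {
    // parses a document and writes it back out; obtaining the DataStructure is left to the caller
    static String roundTrip(DataStructure structure, String input) throws IOException {
        Parser parser = structure.createParser();
        Node<?> root = parser.parse(new StringReader(input));
        Generator generator = structure.createGenerator(root);
        StringWriter writer = new StringWriter();
        generator.generate(writer);
        return writer.toString();
    }
}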

View file

@@ -0,0 +1,229 @@
package org.xbib.datastructures.api;
/**
* A {@code SizeUnit} represents a size at a given decimal unit of
* granularity (a power of 1000) and provides utility methods to convert
* across units, analogous to {@link ByteSizeUnit} which uses powers of 1024.
*/
public enum SizeUnit {
SCALAR {
@Override
public long toScalar(long size) {
return size;
}
@Override
public long toKilo(long size) {
return size / (C1 / C0);
}
@Override
public long toMega(long size) {
return size / (C2 / C0);
}
@Override
public long toGiga(long size) {
return size / (C3 / C0);
}
@Override
public long toTera(long size) {
return size / (C4 / C0);
}
@Override
public long toPeta(long size) {
return size / (C5 / C0);
}
},
KILO {
@Override
public long toScalar(long size) {
return x(size, C1 / C0, MAX / (C1 / C0));
}
@Override
public long toKilo(long size) {
return size;
}
@Override
public long toMega(long size) {
return size / (C2 / C1);
}
@Override
public long toGiga(long size) {
return size / (C3 / C1);
}
@Override
public long toTera(long size) {
return size / (C4 / C1);
}
@Override
public long toPeta(long size) {
return size / (C5 / C1);
}
},
MEGA {
@Override
public long toScalar(long size) {
return x(size, C2 / C0, MAX / (C2 / C0));
}
@Override
public long toKilo(long size) {
return x(size, C2 / C1, MAX / (C2 / C1));
}
@Override
public long toMega(long size) {
return size;
}
@Override
public long toGiga(long size) {
return size / (C3 / C2);
}
@Override
public long toTera(long size) {
return size / (C4 / C2);
}
@Override
public long toPeta(long size) {
return size / (C5 / C2);
}
},
GIGA {
@Override
public long toScalar(long size) {
return x(size, C3 / C0, MAX / (C3 / C0));
}
@Override
public long toKilo(long size) {
return x(size, C3 / C1, MAX / (C3 / C1));
}
@Override
public long toMega(long size) {
return x(size, C3 / C2, MAX / (C3 / C2));
}
@Override
public long toGiga(long size) {
return size;
}
@Override
public long toTera(long size) {
return size / (C4 / C3);
}
@Override
public long toPeta(long size) {
return size / (C5 / C3);
}
},
TERA {
@Override
public long toScalar(long size) {
return x(size, C4 / C0, MAX / (C4 / C0));
}
@Override
public long toKilo(long size) {
return x(size, C4 / C1, MAX / (C4 / C1));
}
@Override
public long toMega(long size) {
return x(size, C4 / C2, MAX / (C4 / C2));
}
@Override
public long toGiga(long size) {
return x(size, C4 / C3, MAX / (C4 / C3));
}
@Override
public long toTera(long size) {
return size;
}
@Override
public long toPeta(long size) {
return size / (C5 / C4);
}
},
PETA {
@Override
public long toScalar(long size) {
return x(size, C5 / C0, MAX / (C5 / C0));
}
@Override
public long toKilo(long size) {
return x(size, C5 / C1, MAX / (C5 / C1));
}
@Override
public long toMega(long size) {
return x(size, C5 / C2, MAX / (C5 / C2));
}
@Override
public long toGiga(long size) {
return x(size, C5 / C3, MAX / (C5 / C3));
}
@Override
public long toTera(long size) {
return x(size, C5 / C4, MAX / (C5 / C4));
}
@Override
public long toPeta(long size) {
return size;
}
};
static final long C0 = 1L;
static final long C1 = C0 * 1000L;
static final long C2 = C1 * 1000L;
static final long C3 = C2 * 1000L;
static final long C4 = C3 * 1000L;
static final long C5 = C4 * 1000L;
static final long MAX = Long.MAX_VALUE;
/**
* Scale d by m, checking for overflow.
* This has a short name to make above code more readable.
*/
static long x(long d, long m, long over) {
if (d > over) {
return Long.MAX_VALUE;
}
if (d < -over) {
return Long.MIN_VALUE;
}
return d * m;
}
public abstract long toScalar(long size);
public abstract long toKilo(long size);
public abstract long toMega(long size);
public abstract long toGiga(long size);
public abstract long toTera(long size);
public abstract long toPeta(long size);
}
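
SizeUnit mirrors ByteSizeUnit but with decimal multipliers (powers of 1000 rather than 1024); a short sketch of the difference:

import org.xbib.datastructures.api.SizeUnit;

public class SizeUnitSketch {
    public static void main(String[] args) {
        // decimal multipliers: 1 kilo = 1000, unlike ByteSizeUnit where 1 KB = 1024 bytes
        System.out.println(SizeUnit.MEGA.toScalar(5));  // 5000000
        System.out.println(SizeUnit.KILO.toMega(2500)); // 2 (integer division truncates)
        System.out.println(SizeUnit.GIGA.toKilo(1));    // 1000000
    }
}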

View file

@@ -0,0 +1,259 @@
package org.xbib.datastructures.api;
import java.util.concurrent.TimeUnit;
/**
* A duration paired with a {@link TimeUnit}, with helpers for parsing
* suffixed strings (e.g. "30s", "5m") and for human-readable formatting.
*/
public class TimeValue {
private static final long C0 = 1L;
private static final long C1 = C0 * 1000L;
private static final long C2 = C1 * 1000L;
private static final long C3 = C2 * 1000L;
private static final long C4 = C3 * 60L;
private static final long C5 = C4 * 60L;
private static final long C6 = C5 * 24L;
private long duration;
private TimeUnit timeUnit;
private TimeValue() {
}
public TimeValue(long millis) {
this(millis, TimeUnit.MILLISECONDS);
}
public TimeValue(long duration, TimeUnit timeUnit) {
this.duration = duration;
this.timeUnit = timeUnit;
}
public static TimeValue timeValueNanos(long nanos) {
return new TimeValue(nanos, TimeUnit.NANOSECONDS);
}
public static TimeValue timeValueMillis(long millis) {
return new TimeValue(millis, TimeUnit.MILLISECONDS);
}
public static TimeValue timeValueSeconds(long seconds) {
return new TimeValue(seconds, TimeUnit.SECONDS);
}
public static TimeValue timeValueMinutes(long minutes) {
return new TimeValue(minutes, TimeUnit.MINUTES);
}
public static TimeValue timeValueHours(long hours) {
return new TimeValue(hours, TimeUnit.HOURS);
}
/**
* Format the double value with a single decimal point, trimming a trailing '.0'.
*
* @param value value
* @param suffix suffix
* @return string
*/
public static String format1Decimals(double value, String suffix) {
String p = String.valueOf(value);
int ix = p.indexOf('.') + 1;
int ex = p.indexOf('E');
char fraction = p.charAt(ix);
if (fraction == '0') {
if (ex != -1) {
return p.substring(0, ix - 1) + p.substring(ex) + suffix;
} else {
return p.substring(0, ix - 1) + suffix;
}
} else {
if (ex != -1) {
return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
} else {
return p.substring(0, ix) + fraction + suffix;
}
}
}
public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue) {
if (sValue == null) {
return defaultValue;
}
long millis;
if (sValue.endsWith("S")) {
millis = Long.parseLong(sValue.substring(0, sValue.length() - 1));
} else if (sValue.endsWith("ms")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)));
} else if (sValue.endsWith("s")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 1000);
} else if (sValue.endsWith("m")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 60 * 1000);
} else if (sValue.endsWith("H") || sValue.endsWith("h")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 60 * 60 * 1000);
} else if (sValue.endsWith("d")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 24 * 60 * 60 * 1000);
} else if (sValue.endsWith("w")) {
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 7 * 24 * 60 * 60 * 1000);
} else {
millis = Long.parseLong(sValue);
}
return new TimeValue(millis, TimeUnit.MILLISECONDS);
}
public long nanos() {
return timeUnit.toNanos(duration);
}
public long getNanos() {
return nanos();
}
public long micros() {
return timeUnit.toMicros(duration);
}
public long getMicros() {
return micros();
}
public long millis() {
return timeUnit.toMillis(duration);
}
public long getMillis() {
return millis();
}
public long seconds() {
return timeUnit.toSeconds(duration);
}
public long getSeconds() {
return seconds();
}
public long minutes() {
return timeUnit.toMinutes(duration);
}
public long getMinutes() {
return minutes();
}
public long hours() {
return timeUnit.toHours(duration);
}
public long getHours() {
return hours();
}
public long days() {
return timeUnit.toDays(duration);
}
public long getDays() {
return days();
}
public double microsFrac() {
return ((double) nanos()) / C1;
}
public double getMicrosFrac() {
return microsFrac();
}
public double millisFrac() {
return ((double) nanos()) / C2;
}
public double getMillisFrac() {
return millisFrac();
}
public double secondsFrac() {
return ((double) nanos()) / C3;
}
public double getSecondsFrac() {
return secondsFrac();
}
public double minutesFrac() {
return ((double) nanos()) / C4;
}
public double getMinutesFrac() {
return minutesFrac();
}
public double hoursFrac() {
return ((double) nanos()) / C5;
}
public double getHoursFrac() {
return hoursFrac();
}
public double daysFrac() {
return ((double) nanos()) / C6;
}
public double getDaysFrac() {
return daysFrac();
}
@Override
public String toString() {
if (duration < 0) {
return Long.toString(duration);
}
long nanos = nanos();
if (nanos == 0) {
return "0s";
}
double value = nanos;
String suffix = "nanos";
if (nanos >= C6) {
value = daysFrac();
suffix = "d";
} else if (nanos >= C5) {
value = hoursFrac();
suffix = "h";
} else if (nanos >= C4) {
value = minutesFrac();
suffix = "m";
} else if (nanos >= C3) {
value = secondsFrac();
suffix = "s";
} else if (nanos >= C2) {
value = millisFrac();
suffix = "ms";
} else if (nanos >= C1) {
value = microsFrac();
suffix = "micros";
}
return format1Decimals(value, suffix);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TimeValue timeValue = (TimeValue) o;
return duration == timeValue.duration && timeUnit == timeValue.timeUnit;
}
@Override
public int hashCode() {
int result = (int) (duration ^ (duration >>> 32));
result = 31 * result + (timeUnit != null ? timeUnit.hashCode() : 0);
return result;
}
}
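
A small sketch of parsing and formatting with TimeValue; the suffixes follow the cases in parseTimeValue above, and the printed values follow from the code as shown.

import java.util.concurrent.TimeUnit;
import org.xbib.datastructures.api.TimeValue;

public class TimeValueSketch {
    public static void main(String[] args) {
        TimeValue parsed = TimeValue.parseTimeValue("90s", null);
        System.out.println(parsed.minutes());        // 1
        System.out.println(parsed.getSecondsFrac()); // 90.0
        System.out.println(parsed);                  // "1.5m"

        TimeValue halfHour = new TimeValue(30, TimeUnit.MINUTES);
        System.out.println(halfHour.millis());       // 1800000
        // a null input falls back to the supplied default (unparsable input throws NumberFormatException)
        System.out.println(TimeValue.parseTimeValue(null, TimeValue.timeValueHours(1))); // "1h"
    }
}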

View file

@@ -0,0 +1,9 @@
package org.xbib.datastructures.api;
public interface ValueNode extends Node<Object> {
void set(Object value);
@Override
Object get();
}

View file

@@ -25,14 +25,14 @@ public class Reader {
private static final Map<Character, Character> escapes = new HashMap<Character, Character>();
static {
escapes.put(new Character('"'), new Character('"'));
escapes.put(new Character('\\'), new Character('\\'));
escapes.put(new Character('/'), new Character('/'));
escapes.put(new Character('b'), new Character('\b'));
escapes.put(new Character('f'), new Character('\f'));
escapes.put(new Character('n'), new Character('\n'));
escapes.put(new Character('r'), new Character('\r'));
escapes.put(new Character('t'), new Character('\t'));
escapes.put('"', '"');
escapes.put('\\', '\\');
escapes.put('/', '/');
escapes.put('b', '\b');
escapes.put('f', '\f');
escapes.put('n', '\n');
escapes.put('r', '\r');
escapes.put('t', '\t');
}
private CharacterIterator it;
@@ -272,9 +272,9 @@ public class Reader {
if (c == 'u') {
add(unicode());
} else {
Object value = escapes.get(new Character(c));
Character value = escapes.get(c);
if (value != null) {
add(((Character) value).charValue());
add(value);
}
}
} else {

View file

@@ -15,7 +15,7 @@ public class JSONParser {
public static final int LONG = 2;
/**
* Event indicating a JSON number value which has a fractional part or an exponent
* and with string length <= 23 chars not including sign. This covers
* and with string length &lt;= 23 chars not including sign. This covers
* all representations of normal values for Double.toString().
*/
public static final int NUMBER = 3;

View file

@@ -1,3 +1,4 @@
dependencies {
api project(':datastructures-api')
api project(':datastructures-tiny')
}

View file

@@ -0,0 +1,9 @@
import org.xbib.datastructures.api.DataStructure;
import org.xbib.datastructures.json.tiny.Json;
module org.xbib.datastructures.json.tiny {
exports org.xbib.datastructures.json.tiny;
requires org.xbib.datastructures.api;
requires org.xbib.datastructures.tiny;
provides DataStructure with Json;
}
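
Because the module declares provides DataStructure with Json, a consumer module that declares uses org.xbib.datastructures.api.DataStructure in its own module-info can discover the implementation through the service loader; a sketch under that assumption:

import java.util.ServiceLoader;
import org.xbib.datastructures.api.DataStructure;

public class ServiceLookupSketch {
    public static void main(String[] args) {
        // requires a consuming module-info with: uses org.xbib.datastructures.api.DataStructure;
        DataStructure json = ServiceLoader.load(DataStructure.class)
                .findFirst()
                .orElseThrow(() -> new IllegalStateException("no DataStructure provider on the module path"));
        System.out.println(json.getClass()); // expected: org.xbib.datastructures.json.tiny.Json
    }
}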

View file

@@ -1,8 +1,10 @@
package org.xbib.datastructures.json;
package org.xbib.datastructures.json.tiny;
import org.xbib.datastructures.api.Node;
import java.util.Deque;
public class EmptyJsonListener implements JsonDeserializer {
public class EmptyJsonListener implements JsonResult {
@Override
public void begin() {
@@ -48,18 +50,15 @@ public class EmptyJsonListener implements JsonDeserializer {
}
@Override
public void beginList() {
public void beginCollection() {
}
@Override
public void endList() {
public void endCollection() {
}
@Override
public void beginMap() {
}
@Override
@@ -67,7 +66,6 @@ public class EmptyJsonListener implements JsonDeserializer {
}
@Override
public Deque<Node<?>> getStack() {
return null;
}

View file

@@ -1,4 +1,4 @@
package org.xbib.datastructures.json;
package org.xbib.datastructures.json.tiny;
import java.util.Objects;