access to way-context variables from the (non-caching) node context

Arndt Brenschede 2017-07-02 22:55:54 +02:00
parent 313592ebd3
commit d4f592732f
41 changed files with 4695 additions and 3 deletions

View file

@ -64,7 +64,8 @@ public final class ProfileCache
BExpressionContextGlobal expctxGlobal = new BExpressionContextGlobal( meta );
rc.expctxWay = new BExpressionContextWay( rc.memoryclass * 512, meta );
rc.expctxNode = new BExpressionContextNode( rc.memoryclass * 128, meta );
rc.expctxNode = new BExpressionContextNode( 0, meta );
rc.expctxNode.setForeignContext( rc.expctxWay );
meta.readMetaData( new File( profileDir, "lookups.dat" ) );

View file

@ -24,6 +24,7 @@ final class BExpression
private static final int LOOKUP_EXP = 32;
private static final int NUMBER_EXP = 33;
private static final int VARIABLE_EXP = 34;
private static final int FOREIGN_VARIABLE_EXP = 35;
private int typ;
private BExpression op1;
@ -138,6 +139,8 @@ final class BExpression
exp.typ = ASSIGN_EXP;
String variable = ctx.parseToken();
if ( variable == null ) throw new IllegalArgumentException( "unexpected end of file" );
if ( variable.indexOf( '=' ) >= 0 ) throw new IllegalArgumentException( "variable name cannot contain '=': " + variable );
if ( variable.indexOf( ':' ) >= 0 ) throw new IllegalArgumentException( "cannot assign context-prefixed variable: " + variable );
exp.variableIdx = ctx.getVariableIdx( variable, true );
if ( exp.variableIdx < ctx.getMinWriteIdx() ) throw new IllegalArgumentException( "cannot assign to readonly variable " + variable );
}
@ -175,6 +178,13 @@ final class BExpression
}
}
}
else if ( ( idx = operator.indexOf( ':' ) ) >= 0 )
{
String context = operator.substring( 0, idx );
String varname = operator.substring( idx+1 );
exp.typ = FOREIGN_VARIABLE_EXP;
exp.variableIdx = ctx.getForeignVariableIdx( context, varname );
}
else if ( (idx = ctx.getVariableIdx( operator, false )) >= 0 )
{
exp.typ = VARIABLE_EXP;
@ -257,6 +267,7 @@ final class BExpression
case LOOKUP_EXP: return ctx.getLookupMatch( lookupNameIdx, lookupValueIdxArray );
case NUMBER_EXP: return numberValue;
case VARIABLE_EXP: return ctx.getVariableValue( variableIdx );
case FOREIGN_VARIABLE_EXP: return ctx.getForeignVariableValue( variableIdx );
case NOT_EXP: return op1.evaluate(ctx) == 0.f ? 1.f : 0.f;
default: throw new IllegalArgumentException( "unknown op-code: " + typ );
}

View file

@ -70,6 +70,8 @@ public abstract class BExpressionContext implements IByteArrayUnifier
private float[] currentVars;
private int currentVarOffset;
private BExpressionContext foreignContext;
protected void setInverseVars()
{
currentVarOffset = nBuildInVars;
@ -108,9 +110,12 @@ public abstract class BExpressionContext implements IByteArrayUnifier
if ( Boolean.getBoolean( "disableExpressionCache" ) ) hashSize = 1;
// create the expression cache
if ( hashSize > 0 )
{
cache = new LruMap( 4*hashSize, hashSize );
resultVarCache = new LruMap( 4096, 4096 );
}
}
/**
* encode internal lookup data to a byte array
@ -348,6 +353,18 @@ public abstract class BExpressionContext implements IByteArrayUnifier
requests++;
lookupDataValid = false; // this is an assertion for a nasty pitfall
if ( cache == null )
{
decode( lookupData, inverseDirection, ab );
if ( currentVars == null || currentVars.length != nBuildInVars )
{
currentVars = new float[nBuildInVars];
}
evaluateInto( currentVars, 0 );
currentVarOffset = 0;
return;
}
CacheNode cn;
if ( lastCacheNode.ab == ab )
{
@ -661,6 +678,50 @@ public abstract class BExpressionContext implements IByteArrayUnifier
return num != null && lookupData[num.intValue()] == 2;
}
public int getOutputVariableIndex( String name )
{
int idx = getVariableIdx( name, false );
if ( idx < 0 )
{
throw new IllegalArgumentException( "unknown variable: " + name );
}
if ( idx < minWriteIdx )
{
throw new IllegalArgumentException( "bad access to global variable: " + name );
}
for( int i=0; i<nBuildInVars; i++ )
{
if ( buildInVariableIdx[i] == idx )
{
return i;
}
}
int[] extended = new int[nBuildInVars + 1];
System.arraycopy( buildInVariableIdx, 0, extended, 0, nBuildInVars );
extended[nBuildInVars] = idx;
buildInVariableIdx = extended;
return nBuildInVars++;
}
public void setForeignContext( BExpressionContext foreignContext )
{
this.foreignContext = foreignContext;
}
public float getForeignVariableValue( int foreignIndex )
{
return foreignContext.getBuildInVariable( foreignIndex );
}
public int getForeignVariableIdx( String context, String name )
{
if ( foreignContext == null || !context.equals( foreignContext.context ) )
{
throw new IllegalArgumentException( "unknown foreign context: " + context );
}
return foreignContext.getOutputVariableIndex( name );
}
public void parseFile( File file, String readOnlyContext )
{
if ( !file.exists() )
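Taken together, the hunks above wire a foreign-context lookup path: the node context keeps a reference to the way context and resolves ':'-prefixed operands through it. A minimal sketch of the intended use, with hypothetical variable names and cache sizes (not part of this diff, and assuming the way context registers itself under the name "way"):
// mirrors the ProfileCache change: node context without expression cache (size 0),
// delegating "way:" prefixed variables to the way context
// (meta = the BExpressionMetaData instance, as in ProfileCache above)
BExpressionContextWay wayCtx = new BExpressionContextWay( 1024, meta );
BExpressionContextNode nodeCtx = new BExpressionContextNode( 0, meta );
nodeCtx.setForeignContext( wayCtx );
// at parse time an operand like "way:costfactor" (hypothetical name) is split at the ':'
// and resolved via nodeCtx.getForeignVariableIdx( "way", "costfactor" ), which registers
// "costfactor" as an output variable of the way context; at evaluation time the
// FOREIGN_VARIABLE_EXP op-code reads it back via getForeignVariableValue( idx )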

View file

@ -0,0 +1,154 @@
package btools.mapdecoder;
public final class BitReadBuffer
{
private byte[] ab;
private int idxMax;
private int idx = -1;
private int bits; // bits left in buffer
private long b;
public BitReadBuffer( byte[] ab )
{
this.ab = ab;
idxMax = ab.length-1;
}
public boolean decodeBit()
{
fillBuffer();
boolean value = ( ( b & 1L ) != 0 );
b >>>= 1;
bits--;
return value;
}
public long decodeBits( int count )
{
if ( count == 0 )
{
return 0;
}
fillBuffer();
long mask = -1L >>> ( 64 - count );
long value = b & mask;
b >>>= count;
bits -= count;
return value;
}
/**
* decode an integer in the range 0..max (inclusive).
*/
public long decodeBounded( long max )
{
long value = 0;
long im = 1; // integer mask
fillBuffer();
while (( value | im ) <= max)
{
if ( ( b & 1 ) != 0 )
value |= im;
b >>>= 1;
bits--;
im <<= 1;
}
return value;
}
/**
* decode a small number with a variable bit length
* (poor man's huffman tree)
* 1 -> 0
* 01 -> 1 + following 1-bit word ( 1..2 )
* 001 -> 3 + following 2-bit word ( 3..6 )
* 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
*/
public int decodeInt()
{
long range = 1;
int cnt = 1;
fillBuffer();
while ((b & range) == 0)
{
range = (range << 1) | 1;
cnt++;
}
b >>>= cnt;
bits -= cnt;
return (int)((range >>> 1) + ( cnt > 1 ? decodeBits( cnt-1 ) : 0 ));
}
/**
* double-log variant of decodeInt, better suited for
* distributions with a big-number tail
*/
public long decodeLong()
{
int n = decodeInt();
return (1L << n) + decodeBits( n ) - 1L;
}
public long[] decodeSortedArray()
{
int size = decodeInt();
long[] values = new long[size];
if ( size == 0 )
{
return values;
}
int offset = 0;
long value = 0;
int bits = decodeInt();
int[] sizestack = new int[bits];
int stackpointer = 0;
for(;;)
{
while( size > 1 && bits > 0 )
{
int size2 = (int)decodeBounded( size );
sizestack[stackpointer++] = size2;
size -= size2;
value <<= 1;
bits--;
}
if ( size == 1 )
{
values[offset++] = (value << bits) | decodeBits( bits );
}
else
{
while (size-- > 0)
{
values[offset++] = value;
}
}
if ( stackpointer == 0 )
{
return values;
}
while ( ( value & 1L ) == 1L )
{
value >>= 1;
bits++;
}
value |= 1L;
size = sizestack[--stackpointer];
}
}
private void fillBuffer()
{
while (bits <= 56)
{
if ( idx < idxMax )
{
b |= (ab[++idx] & 0xffL) << bits;
}
bits += 8;
}
}
}
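A small worked example of the prefix scheme described above (a hand-made sketch, not from the commit): the single byte 0x94 holds, read LSB-first, the code for the value 5 followed by a bounded value 4 in the range 0..6.
// bit stream of 0x94, LSB first: 0 0 1 0 1 | 0 0 1
BitReadBuffer brb = new BitReadBuffer( new byte[] { (byte) 0x94 } );
int a = brb.decodeInt(); // prefix 001 -> 3 + 2-bit word (=2) -> 5
long b = brb.decodeBounded( 6 ); // reads 3 bits for the range 0..6 -> 4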

View file

@ -0,0 +1,44 @@
package btools.mapdecoder;
/**
* Decoder for unicode characters, using a simple (1st-order) Huffman code
*/
public final class CharDecoder extends HuffmannTreeDecoder<Character>
{
private long[] alphabet;
private int range;
private char[] buffer = new char[64];
public CharDecoder( BitReadBuffer brb )
{
super( brb );
}
@Override
protected Object decodeTree()
{
alphabet = brb.decodeSortedArray();
range = alphabet.length - 1;
System.out.println( "decoded alphabet of length " + alphabet.length + " idx3 = " + alphabet[3] );
return super.decodeTree();
}
protected Character decodeItem()
{
int idx = (int)brb.decodeBounded( range );
long lc = alphabet[idx];
System.out.println( "decoded item: c=" + ((char)lc) + " idx=" + idx );
return Character.valueOf( (char)lc );
}
public String decodeString()
{
int n = brb.decodeInt();
char[] b = n <= buffer.length ? buffer : new char[n];
for( int i=0; i<n; i++ )
{
b[i] = decode().charValue();
}
return new String( b, 0, n );
}
}

View file

@ -0,0 +1,59 @@
package btools.mapdecoder;
/**
* Decoder for static Huffman-coded data
*/
public abstract class HuffmannTreeDecoder<V>
{
private Object tree;
protected BitReadBuffer brb;
protected HuffmannTreeDecoder( BitReadBuffer brb )
{
this.brb = brb;
tree = decodeTree();
}
public V decode()
{
Object node = tree;
while (node instanceof TreeNode)
{
TreeNode tn = (TreeNode) node;
node = brb.decodeBit() ? tn.child2 : tn.child1;
}
if ( node == null )
{
return decodeItem(); // inline item
}
return (V) node;
}
protected Object decodeTree()
{
boolean isNode = brb.decodeBit();
if ( isNode )
{
TreeNode node = new TreeNode();
node.child1 = decodeTree();
node.child2 = decodeTree();
return node;
}
boolean isInlinePrefix = brb.decodeBit();
if ( isInlinePrefix )
{
return null;
}
return decodeItem();
}
private static final class TreeNode
{
public Object child1;
public Object child2;
}
protected abstract V decodeItem();
}
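For orientation, the wire format this decoder expects (and the matching encoder in btools.mapsplitter produces) is a depth-first tree dump; a hand-written example bit sequence, not taken from the commit:
// 1                   internal node, its two subtrees follow
//   1                 left child: another internal node
//     0 0 <item A>    its left leaf: a dictionary item, read via decodeItem()
//     0 0 <item B>    its right leaf: another dictionary item
//   0 1               right child: the shared "inline item" leaf (returns null,
//                     so decode() falls back to decodeItem() per occurrence)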

View file

@ -0,0 +1,70 @@
package btools.mapdecoder;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.zip.Inflater;
/**
* Manage the mapping between locale and native node indexes
*/
public class LocaleIndexMapping
{
private int[] refZoomDelta;
private int[] refNativeIndex;
private OsmTile[] tileForZoomDelta;
public LocaleIndexMapping( OsmTile tile, BitReadBuffer brb ) throws Exception
{
// prepare the locale index array
int localeNodeCount = brb.decodeInt();
refZoomDelta = new int[localeNodeCount];
refNativeIndex = new int[localeNodeCount];
tileForZoomDelta = new OsmTile[tile.zoom + 1];
for( OsmTile t = tile; t != null; t = t.parent )
{
tileForZoomDelta[tile.zoom-t.zoom] = t;
}
// decode the down-zoom refs
for( int zoomDelta=tile.zoom; zoomDelta > 0; zoomDelta-- )
{
long[] localeIndexes = brb.decodeSortedArray();
long[] nativeIndexes = brb.decodeSortedArray();
for( int i=0; i<localeIndexes.length; i++ )
{
int idx = (int)localeIndexes[i];
refZoomDelta[idx] = zoomDelta;
refNativeIndex[idx] = (int)nativeIndexes[i];
}
}
// prepare locale->native mapping for zoomDelta=0
int localeIdx = 0;
int nodecount = tile.nodePositions.length;
for( int i=0; i<nodecount; i++)
{
while( refZoomDelta[localeIdx] != 0 )
{
localeIdx++;
}
refNativeIndex[localeIdx++] = i;
}
}
public OsmNode nodeForLocaleIndex( int localeIndex )
{
int zoomDelta = refZoomDelta[localeIndex];
int nativeIndex = refNativeIndex[localeIndex];
return tileForZoomDelta[zoomDelta].nodes.get( nativeIndex );
}
public OsmWay getWay( int zoomDelta, int nativeIndex )
{
return tileForZoomDelta[zoomDelta].ways.get( nativeIndex );
}
}

View file

@ -0,0 +1,63 @@
package btools.mapdecoder;
import java.util.List;
public class NodeTreeElement
{
public int offset;
public int nnodes;
public NodeTreeElement child0;
public NodeTreeElement child1;
public List<OsmNode> nodes;
public static NodeTreeElement createNodeTree( long[] values, int offset, int subsize, long nextbit, long mask )
{
if ( nextbit == 0 )
{
return null;
}
if ( subsize < 1 )
{
return null;
}
long data = mask & values[offset];
mask |= nextbit;
// count 0-bit-fraction
int i = offset;
int end = subsize + offset;
for ( ; i < end; i++ )
{
if ( ( values[i] & mask ) != data )
{
break;
}
}
int size1 = i - offset;
int size2 = subsize - size1;
System.out.println( "createNodeTree: offset=" + offset + " subsize=" + subsize + " size1=" + size1 + " size2=" + size2 );
NodeTreeElement nte = new NodeTreeElement();
nte.offset = offset;
nte.nnodes = subsize;
nte.child0 = createNodeTree( values, offset, size1, nextbit >> 1, mask );
nte.child1 = createNodeTree( values, i, size2, nextbit >> 1, mask );
return nte;
}
public String toString()
{
return " child0=" + (child0 != null ) + " child1=" + (child1 != null );
}
}

View file

@ -0,0 +1,34 @@
package btools.mapdecoder;
public class OsmNode extends OsmObject
{
public int ilon;
public int ilat;
public boolean inBBox( int z, int x, int y )
{
int shift = 28-z;
int x0 = x << shift;
int x1 = (x+1) << shift;
int y0 = y << shift;
int y1 = (y+1) << shift;
boolean outofbox = x1 < ilon || x0 >= ilon || y1 < ilat || y0 >= ilat;
return !outofbox;
}
public static double gudermannian(double y)
{
return Math.atan(Math.sinh(y)) * (180. / Math.PI);
}
public double getLon()
{
return (((double)ilon)/( 1L << 27 ) - 1.)*180.;
}
public double getLat()
{
double y = (1. - ((double)ilat)/( 1L << 27 ))*Math.PI;
return gudermannian(y);
}
}
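The bounding-box check treats ilon/ilat as 28-bit fixed-point coordinates: tile (z, x, y) starts at x << (28-z) and spans 2^(28-z) units per axis. A tiny sketch with hypothetical coordinates:
OsmNode n = new OsmNode();
n.ilon = 100000000; // hypothetical fixed-point position
n.ilat = 90000000;
// zoom 2, tile x=1, y=1: the window starts at 1 << 26 = 67108864 and ends at 2 << 26
boolean hit = n.inBBox( 2, 1, 1 ); // true for the coordinates above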

View file

@ -0,0 +1,12 @@
package btools.mapdecoder;
import java.util.Map;
/**
* Base class of Nodes, Ways and Relations
*/
public class OsmObject
{
public int id;
public Map<String,String> tags;
}

View file

@ -0,0 +1,48 @@
package btools.mapdecoder;
import java.util.List;
public class OsmRelation extends OsmObject
{
public List<OsmRelationMember> members;
// bounding box
int minx;
int miny;
int maxx;
int maxy;
public void calcBBox()
{
for( int i=0; i<members.size(); i++ )
{
OsmWay w = members.get(i).way;
if ( i == 0 )
{
minx = w.minx;
maxx = w.maxx;
miny = w.miny;
maxy = w.maxy;
}
else
{
if ( w.minx < minx ) minx = w.minx;
if ( w.maxx > maxx ) maxx = w.maxx;
if ( w.miny < miny ) miny = w.miny;
if ( w.maxy > maxy ) maxy = w.maxy;
}
}
}
public boolean inBBox( int z, int x, int y )
{
int shift = 28-z;
int x0 = x << shift;
int x1 = (x+1) << shift;
int y0 = y << shift;
int y1 = (y+1) << shift;
boolean outofbox = x1 < minx || x0 >= maxx || y1 < miny || y0 >= maxy;
return !outofbox;
}
}

View file

@ -0,0 +1,9 @@
package btools.mapdecoder;
import java.util.List;
public class OsmRelationMember extends OsmObject
{
public OsmWay way;
public String role;
}

View file

@ -0,0 +1,35 @@
package btools.mapdecoder;
import java.util.Collections;
import java.util.List;
/**
* Container for the decoded content of one osm tile
*
* @author ab
*/
public class OsmTile
{
public OsmTile parent;
public long sourceId;
public int zoom;
public int x;
public int y;
private static List<OsmNode> emptyNodes = Collections.EMPTY_LIST;
private static List<OsmWay> emptyWays = Collections.EMPTY_LIST;
private static List<OsmRelation> emptyRelations = Collections.EMPTY_LIST;
public List<OsmNode> nodes = emptyNodes;
public List<OsmWay> ways = emptyWays;
public List<OsmRelation> relations = emptyRelations;
public long[] nodePositions;
public String toString()
{
return "z=" + zoom+ " x=" + x + " y=" + y + " nodes=" + nodes.size() + " ways=" + ways.size() + " rels=" + relations.size();
}
}

View file

@ -0,0 +1,45 @@
package btools.mapdecoder;
import java.util.List;
public class OsmWay extends OsmObject
{
public List<OsmNode> nodes;
// bounding box
int minx;
int miny;
int maxx;
int maxy;
public void calcBBox()
{
for( int i=0; i<nodes.size(); i++ )
{
OsmNode n = nodes.get(i);
if ( i == 0 )
{
minx = maxx = n.ilon;
miny = maxy = n.ilat;
}
else
{
if ( n.ilon < minx ) minx = n.ilon;
if ( n.ilon > maxx ) maxx = n.ilon;
if ( n.ilat < miny ) miny = n.ilat;
if ( n.ilat > maxy ) maxy = n.ilat;
}
}
}
public boolean inBBox( int z, int x, int y )
{
int shift = 28-z;
int x0 = x << shift;
int x1 = (x+1) << shift;
int y0 = y << shift;
int y1 = (y+1) << shift;
boolean outofbox = x1 < minx || x0 >= maxx || y1 < miny || y0 >= maxy;
return !outofbox;
}
}

View file

@ -0,0 +1,30 @@
package btools.mapdecoder;
/**
* Decoder for a set of tags
*
* Only tagsets detected at least twice
* have their own Huffman codes; those
* detected only once are coded inline
*/
public final class TagSetDecoder extends HuffmannTreeDecoder<int[]>
{
public TagSetDecoder( BitReadBuffer brb )
{
super( brb );
}
protected int[] decodeItem()
{
int tagcount = brb.decodeInt();
int[] data = new int[tagcount];
int lastIdx = -1;
for( int i=0; i<tagcount; i++ )
{
int idx = lastIdx + 1 + brb.decodeInt();
data[i] = idx;
lastIdx = idx;
}
return data;
}
}

View file

@ -0,0 +1,88 @@
package btools.mapdecoder;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
/**
* Decoder for the list of tags and their value-trees
*/
public class TagValueDecoder
{
private int nextStringStart = 0;
private byte[] textHeader;
private ArrayList<String> stringList;
private ArrayList<Tag> taglist;
private int roleIdx;
private String decodeString( BitReadBuffer brb )
{
boolean newIdx = brb.decodeBit();
if ( newIdx )
{
int slen = brb.decodeInt();
try
{
String s = new String( textHeader, nextStringStart, slen, "UTF8" );
nextStringStart += slen;
stringList.add( s );
return s;
}
catch( UnsupportedEncodingException uee )
{
throw new RuntimeException( uee );
}
}
int idx = (int)brb.decodeBounded( stringList.size()-1 );
return stringList.get( idx );
}
private class Tag extends HuffmannTreeDecoder<String>
{
String name;
Tag( BitReadBuffer brb, String tagName )
{
super( brb );
name = tagName;
}
protected String decodeItem()
{
return decodeString( brb );
}
}
public TagValueDecoder( BitReadBuffer brb, byte[] textHeader )
{
this.textHeader = textHeader;
stringList = new ArrayList<String>();
int ntags = brb.decodeInt();
taglist = new ArrayList<Tag>();
for( int i=0; i<ntags; i++ )
{
String tagName = decodeString( brb );
taglist.add( new Tag( brb, tagName ) );
if ( "role".equals( tagName ) )
{
roleIdx = i;
}
}
}
public String getTagName( int idx )
{
return taglist.get(idx).name;
}
public String decodeValue( int tagIdx )
{
return taglist.get( tagIdx ).decode();
}
public String decodeRole()
{
return taglist.get( roleIdx ).decode();
}
}

View file

@ -0,0 +1,207 @@
package btools.mapdecoder;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.zip.Inflater;
/**
* TileDecoder decodes a compressed osm tile
*/
public class TileDecoder
{
private TagSetDecoder tagSetDecoder;
private TagValueDecoder tagValueDecoder;
public static void main( String[] args ) throws Exception
{
OsmTile t = new TileDecoder().process( new File( args[0] ), null, Integer.parseInt( args[1] ), Integer.parseInt( args[2] ), Integer.parseInt( args[3] ) );
while( t != null )
{
System.out.println( "decoded: " + t );
t = t.parent;
}
}
public OsmTile process( File tileDir, OsmTile template, int zoom, int x, int y ) throws Exception
{
long sourceId = tileDir.getAbsolutePath().hashCode();
// look for a match in the template
for( OsmTile tile = template; tile != null; tile = tile.parent )
{
if ( tile.zoom == zoom && tile.x == x && tile.y == y && tile.sourceId == sourceId )
{
return tile;
}
}
OsmTile td = new OsmTile();
td.sourceId = sourceId;
td.zoom = zoom;
td.x = x;
td.y = y;
if ( zoom > 0 )
{
td.parent = new TileDecoder().process( tileDir, template, zoom-1, x >> 1, y >> 1 );
}
File file = new File( new File( tileDir, "" + zoom ), x + "_" + y + ".osb" );
if ( !file.exists() )
{
return td;
}
DataInputStream dis = new DataInputStream( new FileInputStream( file ) );
int textHeaderLen = dis.readInt();
int textHeaderCompressedLen = dis.readInt();
byte[] textHeaderCompressed = new byte[textHeaderCompressedLen];
dis.readFully( textHeaderCompressed );
byte[] textHeader = new byte[textHeaderLen];
Inflater decompresser = new Inflater();
decompresser.setInput( textHeaderCompressed );
int rawlen = decompresser.inflate( textHeader );
int bufferLen = dis.readInt();
byte[] buffer = new byte[bufferLen];
dis.readFully( buffer );
BitReadBuffer brb = new BitReadBuffer( buffer );
dis.close();
tagSetDecoder = new TagSetDecoder( brb );
tagValueDecoder = new TagValueDecoder( brb, textHeader );
// decode the node positions
td.nodePositions = brb.decodeSortedArray();
int nodecount = td.nodePositions.length;
td.nodes = new ArrayList<OsmNode>(nodecount);
int shift = 56-2*zoom;
long offset = (encodeMorton( x ) << shift) + (encodeMorton( y ) << (shift+1) );
for ( int nidx = 0; nidx < nodecount; nidx++ )
{
OsmNode n = new OsmNode();
long z = offset + td.nodePositions[nidx];
n.id = nidx;
n.ilon = decodeMorton( z );
n.ilat = decodeMorton( z >> 1 );
td.nodes.add( n );
}
LocaleIndexMapping indexMapping = new LocaleIndexMapping( td, brb );
// decode tagged nodes
long[] taggedIndexes = brb.decodeSortedArray();
int ntaggedNodes = taggedIndexes.length;
for( int tnidx=0; tnidx<ntaggedNodes; tnidx++ )
{
int idx = (int)taggedIndexes[tnidx];
td.nodes.get( idx ).tags = decodeTagValues();
}
// decode ways
long[] startIndexes = brb.decodeSortedArray();
int nways = startIndexes.length;
td.ways = new ArrayList<OsmWay>( nways );
for( int widx=0; widx<nways; widx++ )
{
OsmWay w = new OsmWay();
w.tags = decodeTagValues();
int[] nodeIndexes = decodeWayNodes( (int)startIndexes[widx], brb );
w.nodes = new ArrayList<OsmNode>( nodeIndexes.length );
for( int i=0; i<nodeIndexes.length; i++ )
{
w.nodes.add( indexMapping.nodeForLocaleIndex( nodeIndexes[i] ) );
}
w.calcBBox();
td.ways.add( w );
}
// decode relations
int nrels = brb.decodeInt();
td.relations = new ArrayList<OsmRelation>( nrels );
for( int ridx=0; ridx<nrels; ridx++ )
{
OsmRelation r = new OsmRelation();
r.tags = decodeTagValues();
int nmembers = brb.decodeInt();
r.members = new ArrayList<OsmRelationMember>(nmembers);
for( int midx = 0; midx<nmembers; midx++ )
{
OsmRelationMember m = new OsmRelationMember();
int zoomDelta = brb.decodeInt();
int nativeIndex = brb.decodeInt();
m.role = tagValueDecoder.decodeRole();
m.way = indexMapping.getWay( zoomDelta, nativeIndex );
r.members.add( m );
}
r.calcBBox();
td.relations.add( r );
}
return td;
}
private int[] decodeWayNodes( int startIdx, BitReadBuffer brb )
{
boolean closedPoly = brb.decodeBit();
int nnodes = brb.decodeInt() + 2;
int[] ids = new int[ closedPoly ? nnodes+1 : nnodes ];
int lastIdx = startIdx;
ids[0] = startIdx;
for( int i=1; i<nnodes; i++ )
{
boolean negative = brb.decodeBit();
int delta = (int)brb.decodeLong() + 1;
ids[i] = lastIdx = lastIdx + (negative ? -delta : delta );
}
if ( closedPoly )
{
ids[nnodes] = startIdx;
}
return ids;
}
private HashMap<String,String> decodeTagValues()
{
HashMap<String,String> map = new HashMap<String,String>();
int[] tagSet = tagSetDecoder.decode();
for( int i=0; i<tagSet.length; i++ )
{
int tagIdx = tagSet[i];
String key = tagValueDecoder.getTagName( tagIdx );
String value = tagValueDecoder.decodeValue( tagIdx );
map.put( key, value );
}
return map;
}
public static int decodeMorton( long z )
{
long x = z & 0x5555555555555555L;
x = (x | (x >> 1)) & 0x3333333333333333L;
x = (x | (x >> 2)) & 0x0F0F0F0F0F0F0F0FL;
x = (x | (x >> 4)) & 0x00FF00FF00FF00FFL;
x = (x | (x >> 8)) & 0x0000FFFF0000FFFFL;
return (int)(x | (x >> 16));
}
public static long encodeMorton( int x )
{
long z = x & 0xFFFFFFFFL;
z = (z | (z << 16)) & 0x0000FFFF0000FFFFL;
z = (z | (z << 8)) & 0x00FF00FF00FF00FFL;
z = (z | (z << 4)) & 0x0F0F0F0F0F0F0F0FL;
z = (z | (z << 2)) & 0x3333333333333333L;
return (z|(z << 1)) & 0x5555555555555555L;
}
}
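The Morton helpers interleave x into the even and y into the odd bit positions, which is what lets nodePositions be stored as a single sorted array per tile; a quick round-trip sketch:
int x = 5, y = 3; // binary 101 and 011
long z = TileDecoder.encodeMorton( x ) | ( TileDecoder.encodeMorton( y ) << 1 );
// z = 0b011011 = 27: bits ... y2 x2 y1 x1 y0 x0
int xBack = TileDecoder.decodeMorton( z ); // 5
int yBack = TileDecoder.decodeMorton( z >> 1 ); // 3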

View file

@ -0,0 +1,231 @@
package btools.mapsplitter;
import java.util.TreeMap;
public final class BitWriteBuffer
{
private static TreeMap<String, long[]> statsPerName;
private long lastbitpos = 0;
private byte[] ab;
private int idxMax;
private int idx = -1;
private int bm = 0x100; // byte mask (write mode)
private int b;
public BitWriteBuffer( byte[] ab )
{
this.ab = ab;
idxMax = ab.length-1;
}
/**
* encode a small number with a variable bit length
* (poor man's huffman tree)
* 1 -> 0
* 01 -> 1 + following 1-bit word ( 1..2 )
* 001 -> 3 + following 2-bit word ( 3..6 )
* 0001 -> 7 + following 3-bit word ( 7..14 ) etc.
*
* @see btools.mapdecoder.BitReadBuffer#decodeInt
*/
public void encodeInt( int value )
{
int range = 0;
while (value > range)
{
encodeBit( false );
value -= range + 1;
range = 2 * range + 1;
}
encodeBit( true );
encodeBounded( range, value );
}
public void encodeLong( long n )
{
int maxbit = 0;
long nn = n + 1L;
while( nn > 1L )
{
maxbit++;
nn >>= 1;
}
encodeInt( maxbit );
long range = 1L << maxbit; // 1L: maxbit may exceed 31
encodeBounded( range-1L, n + 1L -range );
}
public void encodeBit( boolean value )
{
if ( bm == 0x100 )
{
bm = 1;
ab[++idx] = 0;
}
if ( value )
ab[idx] |= bm;
bm <<= 1;
}
/**
* encode an integer in the range 0..max (inclusive).
* For max = 2^n-1, this just encodes n bits, but in general
* this is variable length encoding, with the shorter codes
* for the central value range
*/
public void encodeBounded( long max, long value )
{
long im = 1L; // integer mask
while (im <= max)
{
if ( bm == 0x100 )
{
bm = 1;
ab[++idx] = 0;
}
if ( ( value & im ) != 0 )
{
ab[idx] |= bm;
max -= im;
}
bm <<= 1;
im <<= 1;
}
}
/**
* @return the encoded length in bytes
*/
public int getEncodedLength()
{
return idx + 1;
}
/**
* @return the encoded length in bits
*/
public long getWritingBitPosition()
{
long bitpos = idx << 3;
int m = bm;
while (m > 1)
{
bitpos++;
m >>= 1;
}
return bitpos;
}
public void encodeSortedArray( long[] values )
{
int size = values.length;
encodeInt( size );
if ( size == 0 )
{
return;
}
long maxValue = values[size-1];
int nbits = 0;
while ( maxValue > 0 )
{
nbits++;
maxValue >>= 1;
}
if ( nbits > 57 ) throw new IllegalArgumentException( "encodeSortedArray accepts 57-bit numbers at max" );
encodeInt( nbits );
encodeSortedArray( values, 0, size, ( 1L << nbits ) >> 1, 0L );
}
private void encodeSortedArray( long[] values, int offset, int subsize, long nextbit, long mask )
{
if ( subsize == 1 ) // last-choice shortcut
{
long bit = 1L;
while ( bit <= nextbit )
{
encodeBit( ( values[offset] & bit ) != 0 );
bit <<= 1;
}
return;
}
if ( nextbit == 0 )
{
return;
}
long data = mask & values[offset];
mask |= nextbit;
// count 0-bit-fraction
int i = offset;
int end = subsize + offset;
for ( ; i < end; i++ )
{
if ( ( values[i] & mask ) != data )
{
break;
}
}
int size1 = i - offset;
int size2 = subsize - size1;
encodeBounded( subsize, size2 );
if ( size1 > 0 )
{
encodeSortedArray( values, offset, size1, nextbit >> 1, mask );
}
if ( size2 > 0 )
{
encodeSortedArray( values, i, size2, nextbit >> 1, mask );
}
}
/**
* assign the de-/encoded bits since the last call assignBits to the given
* name. Used for encoding statistics
*
* @see #getBitReport
*/
public void assignBits( String name )
{
long bitpos = getWritingBitPosition();
if ( statsPerName == null )
{
statsPerName = new TreeMap<String, long[]>();
}
long[] stats = statsPerName.get( name );
if ( stats == null )
{
stats = new long[2];
statsPerName.put( name, stats );
}
stats[0] += bitpos - lastbitpos;
stats[1] += 1;
lastbitpos = bitpos;
}
/**
* Get a textual report on the bit-statistics
*
* @see #assignBits
*/
public static String getBitReport()
{
if ( statsPerName == null )
{
return "<empty bit report>";
}
StringBuilder sb = new StringBuilder();
for ( String name : statsPerName.keySet() )
{
long[] stats = statsPerName.get( name );
sb.append( name + " count=" + stats[1] + " bits=" + stats[0] + "\n" );
}
statsPerName = null;
return sb.toString();
}
}
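This writer is the counterpart of the BitReadBuffer in btools.mapdecoder above, so the two can be exercised together; a minimal round-trip sketch (values and buffer size invented):
// BitWriteBuffer from btools.mapsplitter, BitReadBuffer from btools.mapdecoder
byte[] buf = new byte[64];
BitWriteBuffer out = new BitWriteBuffer( buf );
out.encodeInt( 5 );
out.encodeLong( 123456789L );
out.encodeSortedArray( new long[] { 3L, 17L, 42L } ); // must be sorted ascending
BitReadBuffer in = new BitReadBuffer( buf );
int a = in.decodeInt(); // 5
long b = in.decodeLong(); // 123456789
long[] c = in.decodeSortedArray(); // { 3, 17, 42 }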

View file

@ -0,0 +1,71 @@
package btools.mapsplitter;
import java.util.TreeMap;
/**
* Encoder for characters, using a simple (1st-order) Huffman code
*/
public final class CharEncoder extends HuffmanTreeEncoder<Character>
{
private long[] alphabet;
private int range;
private TreeMap<Character,Integer> chars = new TreeMap<Character,Integer>();
public void encode( Character c )
{
if ( pass == 1 )
{
chars.put( c, null );
}
super.encode( c );
}
public void encodeDictionary( BitWriteBuffer bwb )
{
if ( pass == 1 ) // the pass counter is incremented in super.encodeDictionary below, so this prepares pass 2
{
int idx = 0;
alphabet = new long[chars.size()];
range = chars.size()-1;
for ( Character c : chars.keySet() )
{
System.out.println( "assigning index " + idx + " to char=" + c );
alphabet[idx] = c;
chars.put( c, Integer.valueOf( idx++ ) );
}
}
if ( alphabet != null )
{
bwb.encodeSortedArray( alphabet );
}
super.encodeDictionary( bwb );
}
protected void encodeItem( Character c )
{
int idx = chars.get( c ).intValue();
System.out.println( "encoding item: c=" + c + " idx=" + idx );
bwb.encodeBounded( range, idx );
}
@Override
public boolean itemEquals( Character c1, Character c2 )
{
if ( c1 == null )
{
return c2 == null;
}
if ( c2 == null )
{
return false;
}
return c1.charValue() == c2.charValue();
}
@Override
public int itemHashCode( Character c)
{
return c == null ? 0 : c.charValue();
}
}

View file

@ -0,0 +1,140 @@
package btools.mapsplitter;
import java.util.Comparator;
import java.util.HashMap;
import java.util.PriorityQueue;
/**
* Generic Huffman-tree encoder
*
* It detects identical items and sorts them
* into a huffman-tree according to their frequencies
*
* Adapted for 3-pass encoding (counters -> statistics -> encoding)
* but doesn't do anything in pass 1
*/
public abstract class HuffmanTreeEncoder<V>
{
private HashMap<TreeNode, TreeNode> identityMap;
protected BitWriteBuffer bwb;
protected int pass;
private TreeNode freq1;
public void encode( V data )
{
if ( pass == 1 )
{
return;
}
TreeNode probe = new TreeNode();
probe.data = data;
TreeNode tn = identityMap.get( probe );
if ( pass == 3 )
{
if ( tn.frequency == 1 )
{
bwb.encodeBounded( freq1.range - 1, freq1.code );
encodeItem( data );
}
else
{
bwb.encodeBounded( tn.range - 1, tn.code );
}
}
else if ( pass == 2 )
{
if ( tn == null )
{
tn = probe;
identityMap.put( tn, tn );
}
tn.frequency++;
}
}
public void encodeDictionary( BitWriteBuffer bwb )
{
this.bwb = bwb;
if ( ++pass == 3 )
{
freq1 = new TreeNode();
PriorityQueue<TreeNode> queue = new PriorityQueue<TreeNode>(2*identityMap.size(), new Comparator<TreeNode>()
{
@Override
public int compare(TreeNode tn1, TreeNode tn2)
{
if ( tn1.frequency < tn2.frequency )
return -1;
if ( tn1.frequency > tn2.frequency )
return 1;
return 0;
}
} );
// collect the huffman leaves: items seen more than once get their own tree node,
// items seen only once share the inline "freq1" node (mirrors TagSetEncoder.encodeDictionary)
for( TreeNode tn : identityMap.values() )
{
if ( tn.frequency > 1 )
{
queue.add( tn );
}
else
{
freq1.frequency++;
}
}
queue.add( freq1 );
while (queue.size() > 1)
{
TreeNode node = new TreeNode();
node.child1 = queue.poll();
node.child2 = queue.poll();
node.frequency = node.child1.frequency + node.child2.frequency;
queue.add( node );
}
TreeNode root = queue.poll();
root.encode( 1, 0 );
}
}
public HuffmanTreeEncoder()
{
identityMap = new HashMap<TreeNode, TreeNode>();
}
protected abstract void encodeItem( V data );
protected abstract boolean itemEquals( V i1, V i2 );
protected abstract int itemHashCode( V i);
public final class TreeNode
{
public V data;
public int frequency;
public int code;
public int range;
public TreeNode child1;
public TreeNode child2;
public void encode( int range, int code )
{
this.range = range;
this.code = code;
boolean isNode = child1 != null;
bwb.encodeBit( isNode );
if ( isNode )
{
child1.encode( range << 1, code );
child2.encode( range << 1, code + range );
}
else
{
bwb.encodeBit( data == null );
if ( data != null )
{
encodeItem( data );
}
}
}
@Override
public boolean equals( Object o )
{
return itemEquals( ((TreeNode)o).data, data );
}
@Override
public int hashCode()
{
return itemHashCode( data );
}
}
}

View file

@ -0,0 +1,173 @@
/**
* common base class for the map-filters
*
* @author ab
*/
package btools.mapsplitter;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import btools.util.DiffCoderDataOutputStream;
public abstract class MapCreatorBase implements WayListener, NodeListener, RelationListener
{
private DiffCoderDataOutputStream[] tileOutStreams;
protected File outTileDir;
protected HashMap<String,String> tags;
public void putTag( String key, String value )
{
if ( tags == null ) tags = new HashMap<String,String>();
tags.put( key, value );
}
public String getTag( String key )
{
return tags == null ? null : tags.get( key );
}
public HashMap<String,String> getTagsOrNull()
{
return tags;
}
public void setTags( HashMap<String,String> tags )
{
this.tags = tags;
}
protected static long readId( DataInputStream is) throws IOException
{
int offset = is.readByte();
if ( offset == 32 ) return -1;
long i = is.readInt();
i = i << 5;
return i | offset;
}
protected static void writeId( DataOutputStream o, long id ) throws IOException
{
if ( id == -1 )
{
o.writeByte( 32 );
return;
}
int offset = (int)( id & 0x1f );
int i = (int)( id >> 5 );
o.writeByte( offset );
o.writeInt( i );
}
protected static File[] sortBySizeAsc( File[] files )
{
int n = files.length;
long[] sizes = new long[n];
File[] sorted = new File[n];
for( int i=0; i<n; i++ ) sizes[i] = files[i].length();
for(int nf=0; nf<n; nf++)
{
int idx = -1;
long min = -1;
for( int i=0; i<n; i++ )
{
if ( sizes[i] != -1 && ( idx == -1 || sizes[i] < min ) )
{
min = sizes[i];
idx = i;
}
}
sizes[idx] = -1;
sorted[nf] = files[idx];
}
return sorted;
}
protected File fileFromTemplate( File template, File dir, String suffix )
{
String filename = template.getName();
filename = filename.substring( 0, filename.length() - 3 ) + suffix;
return new File( dir, filename );
}
protected DataInputStream createInStream( File inFile ) throws IOException
{
return new DataInputStream( new BufferedInputStream ( new FileInputStream( inFile ) ) );
}
protected DiffCoderDataOutputStream createOutStream( File outFile ) throws IOException
{
return new DiffCoderDataOutputStream( new BufferedOutputStream( new FileOutputStream( outFile ) ) );
}
protected DiffCoderDataOutputStream getOutStreamForTile( int tileIndex ) throws Exception
{
if ( tileOutStreams == null )
{
tileOutStreams = new DiffCoderDataOutputStream[64];
}
if ( tileOutStreams[tileIndex] == null )
{
tileOutStreams[tileIndex] = createOutStream( new File( outTileDir, getNameForTile( tileIndex ) ) );
}
return tileOutStreams[tileIndex];
}
protected String getNameForTile( int tileIndex )
{
throw new IllegalArgumentException( "getNameForTile not implemented" );
}
protected void closeTileOutStreams() throws Exception
{
if ( tileOutStreams == null )
{
return;
}
for( int tileIndex=0; tileIndex<tileOutStreams.length; tileIndex++ )
{
if ( tileOutStreams[tileIndex] != null ) tileOutStreams[tileIndex].close();
tileOutStreams[tileIndex] = null;
}
}
// interface dummies
@Override
public void nodeFileStart( File nodefile ) throws Exception {}
@Override
public void nextNode( NodeData n ) throws Exception {}
@Override
public void nodeFileEnd( File nodefile ) throws Exception {}
@Override
public void wayFileStart( File wayfile ) throws Exception {}
@Override
public void nextWay( WayData data ) throws Exception {}
@Override
public void wayFileEnd( File wayfile ) throws Exception {}
@Override
public void relationFileStart( File relfile ) throws Exception {}
@Override
public void nextRelation( RelationData data ) throws Exception {}
@Override
public void relationFileEnd( File relfile ) throws Exception {}
}
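writeId/readId above split an id into a 5-bit offset plus a 32-bit remainder, with the single byte 32 as an end marker; a small worked example (hypothetical id):
long id = 1000L; // hypothetical id
int offset = (int) ( id & 0x1f ); // 8, written as one byte
int rest = (int) ( id >> 5 ); // 31, written as one int
long back = ( (long) rest << 5 ) | offset; // 1000 again, as readId computes it
// id == -1 is written as the single marker byte 32, with no int following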

View file

@ -0,0 +1,181 @@
package btools.mapsplitter;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import btools.util.DiffCoderDataInputStream;
import btools.util.DiffCoderDataOutputStream;
/**
* Container for node data on the preprocessor level
*
* @author ab
*/
public class NodeData extends MapCreatorBase
{
public long nid;
public int ilon;
public int ilat;
public byte[] description;
public short selev = Short.MIN_VALUE;
public long gid; // geo-id
public int zoom = -1; // the zoom level this node is on
public int nativeIndex; // the index among all NATIVE nodes of its tile
public transient int localeIndex; // the index among all USED nodes of its tile
public transient boolean used; // whether this node is used by a way
public NodeData( long id, double lon, double lat )
{
nid = id;
double y = gudermannianInv( lat );
ilat = (int)( (1.-y/Math.PI )*( 1L << 27 )+ 0.5);
ilon = (int)( ( lon/180. + 1. )*( 1L << 27 ) + 0.5 );
}
public NodeData( long id, int ilon, int ilat )
{
this.nid = id;
this.ilat = ilat;
this.ilon = ilon;
}
public boolean inBBox( int z, int x, int y )
{
int shift = 28-z;
int x0 = x << shift;
int x1 = (x+1) << shift;
int y0 = y << shift;
int y1 = (y+1) << shift;
boolean outofbox = x1 < ilon || x0 >= ilon || y1 < ilat || y0 >= ilat;
return !outofbox;
}
public static double gudermannianInv(double latitude)
{
double sign = latitude < 0. ? -1. : 1.;
double sin = Math.sin( latitude * (Math.PI / 180.) * sign);
return sign * (Math.log((1.0 + sin) / (1.0 - sin)) / 2.0);
}
public static double gudermannian(double y)
{
return Math.atan(Math.sinh(y)) * (180. / Math.PI);
}
public double getLon()
{
return (((double)ilon)/( 1L << 27 ) - 1.)*180.;
}
public double getLat()
{
double y = (1. - ((double)ilat)/( 1L << 27 ))*Math.PI;
return gudermannian(y);
}
public void calcGeoId()
{
if ( zoom < 0 ) throw new IllegalArgumentException( "no zoom level yet" );
gid = 0L;
for( long bm = 1L << (27-zoom); bm > 0; bm >>= 1 )
{
gid <<= 2;
if ( ( ilon & bm ) != 0 ) gid |= 1;
if ( ( ilat & bm ) != 0 ) gid |= 2;
}
}
public static void sortByGeoId( List<NodeData> nodes )
{
Collections.sort( nodes, new Comparator<NodeData>()
{
@Override
public int compare(NodeData n1, NodeData n2)
{
long d = n1.gid - n2.gid;
// for equal positions sort by nid
if ( d == 0L )
{
d = n1.nid - n2.nid;
}
return d == 0 ? 0 : ( d < 0 ? -1 : 1 );
}
} );
}
public NodeData( DiffCoderDataInputStream dis ) throws Exception
{
zoom = dis.readInt();
nativeIndex = dis.readInt();
nid = dis.readDiffed( 0 );
ilon = (int)dis.readDiffed( 1 );
ilat = (int)dis.readDiffed( 2 );
for (;;)
{
String key = dis.readUTF();
if ( key.length() == 0 ) break;
String value = dis.readUTF();
putTag( key, value );
}
}
public void writeTo( DiffCoderDataOutputStream dos ) throws Exception
{
dos.writeInt( zoom );
dos.writeInt( nativeIndex );
dos.writeDiffed( nid, 0 );
dos.writeDiffed( ilon, 1 );
dos.writeDiffed( ilat, 2 );
if ( getTagsOrNull() != null )
{
for( Map.Entry<String,String> me : getTagsOrNull().entrySet() )
{
if ( me.getKey().length() > 0 )
{
dos.writeUTF( me.getKey() );
dos.writeUTF( me.getValue() );
}
}
}
dos.writeUTF( "" );
}
private int mercatorLon( long x, long z )
{
return (int) ( ( 360000000L * x ) >> z );
}
private int mercatorLat( long y, long z )
{
double n = Math.PI - ( 2.0 * Math.PI * y ) / ( 1L << z );
double d = Math.toDegrees( Math.atan( Math.sinh( n ) ) );
return (int) ( ( d + 90. ) * 1000000. + 0.5 );
}
@Override
public boolean equals( Object o )
{
if ( o instanceof NodeData )
{
NodeData n = (NodeData) o;
return n.nid == nid;
}
return false;
}
@Override
public int hashCode()
{
return (int)((nid >> 32) ^ nid);
}
}
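Positions are stored as 28-bit fixed-point values in a web-mercator-like projection via the Gudermannian transform above; a quick round-trip sketch with hypothetical coordinates:
NodeData n = new NodeData( 4711L, 8.6724, 50.1109 ); // id, lon, lat (hypothetical)
// ilon = (lon/180 + 1) * 2^27, ilat = (1 - gudermannianInv(lat)/PI) * 2^27
double lon = n.getLon(); // ~8.6724 again
double lat = n.getLat(); // ~50.1109 again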

View file

@ -0,0 +1,65 @@
package btools.mapsplitter;
import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import btools.util.DiffCoderDataInputStream;
/**
* Iterate over a single nodefile or a directory
* of nodetiles and feed the nodes to the callback listener
*
* @author ab
*/
public class NodeIterator extends MapCreatorBase
{
private NodeListener listener;
public NodeIterator( NodeListener nodeListener )
{
listener = nodeListener;
}
public void processDir( File indir, String inSuffix ) throws Exception
{
if ( !indir.isDirectory() )
{
throw new IllegalArgumentException( "not a directory: " + indir );
}
File[] af = sortBySizeAsc( indir.listFiles() );
for( int i=0; i<af.length; i++ )
{
File nodefile = af[i];
if ( nodefile.getName().endsWith( inSuffix ) )
{
processFile( nodefile );
}
}
}
public void processFile(File nodefile) throws Exception
{
System.out.println( "*** NodeIterator reading: " + nodefile );
listener.nodeFileStart( nodefile );
DiffCoderDataInputStream di = new DiffCoderDataInputStream( new BufferedInputStream ( new FileInputStream( nodefile ) ) );
try
{
for(;;)
{
NodeData n = new NodeData( di );
listener.nextNode( n );
}
}
catch( EOFException eof )
{
di.close();
}
listener.nodeFileEnd( nodefile );
}
}

View file

@ -0,0 +1,17 @@
package btools.mapsplitter;
import java.io.File;
/**
* Callback listener for NodeIterator
*
* @author ab
*/
public interface NodeListener
{
void nodeFileStart( File nodefile ) throws Exception;
void nextNode( NodeData data ) throws Exception;
void nodeFileEnd( File nodefile ) throws Exception;
}

View file

@ -0,0 +1,248 @@
package btools.mapsplitter;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;
import btools.util.LongList;
/**
* Parser for OSM data
*
* @author ab
*/
public class OsmParser2 extends MapCreatorBase
{
private BufferedReader _br;
private NodeListener nListener;
private WayListener wListener;
private RelationListener rListener;
public void readMap( File mapFile,
NodeListener nListener,
WayListener wListener,
RelationListener rListener ) throws Exception
{
this.nListener = nListener;
this.wListener = wListener;
this.rListener = rListener;
if ( mapFile == null )
{
_br = new BufferedReader(new InputStreamReader(System.in, "UTF8" ));
}
else
{
if ( mapFile.getName().endsWith( ".gz" ) )
{
_br = new BufferedReader(new InputStreamReader( new GZIPInputStream( new FileInputStream( mapFile ) ),"UTF8" ) );
}
else
{
_br = new BufferedReader(new InputStreamReader( new FileInputStream( mapFile ) , "UTF8" ) );
}
}
for(;;)
{
String line = _br.readLine();
if ( line == null ) break;
if ( checkNode( line ) ) continue;
if ( checkWay( line ) ) continue;
if ( checkRelation( line ) ) continue;
if ( checkChangeset( line ) ) continue;
}
if ( mapFile != null )
{
_br.close();
}
}
private boolean checkNode( String line ) throws Exception
{
int idx0 = line.indexOf( "<node id=\"" );
if ( idx0 < 0 ) return false;
idx0 += 10;
int idx1 = line.indexOf( '"', idx0 );
long nodeId = Long.parseLong( line.substring( idx0, idx1 ) );
int idx2 = line.indexOf( " lat=\"" );
if ( idx2 < 0 ) return false;
idx2 += 6;
int idx3 = line.indexOf( '"', idx2 );
double lat = Double.parseDouble( line.substring( idx2, idx3 ) );
int idx4 = line.indexOf( " lon=\"" );
if ( idx4 < 0 ) return false;
idx4 += 6;
int idx5 = line.indexOf( '"', idx4 );
double lon = Double.parseDouble( line.substring( idx4, idx5 ) );
NodeData n = new NodeData( nodeId, lon, lat );
if ( !line.endsWith( "/>" ) )
{
// read additional tags
for(;;)
{
String l2 = _br.readLine();
if ( l2 == null ) return false;
int i2;
if ( (i2 = l2.indexOf( "<tag k=\"" )) >= 0 )
{ // property-tag
i2 += 8;
int ri2 = l2.indexOf( '"', i2 );
String key = l2.substring( i2, ri2 );
i2 = l2.indexOf( " v=\"", ri2 );
if ( i2 >= 0 )
{
i2 += 4;
int ri3 = l2.indexOf( '"', i2 );
String value = l2.substring( i2, ri3 );
n.putTag( key, value );
}
}
else if ( l2.indexOf( "</node>" ) >= 0 )
{ // end-tag
break;
}
}
}
nListener.nextNode( n );
return true;
}
private boolean checkWay( String line ) throws Exception
{
int idx0 = line.indexOf( "<way id=\"" );
if ( idx0 < 0 ) return false;
idx0 += 9;
int idx1 = line.indexOf( '"', idx0 );
long id = Long.parseLong( line.substring( idx0, idx1 ) );
WayData w = new WayData( id );
// read the nodes
for(;;)
{
String l2 = _br.readLine();
if ( l2 == null ) return false;
int i2;
if ( (i2 = l2.indexOf( "<nd ref=\"" )) >= 0 )
{ // node reference
i2 += 9;
int ri2 = l2.indexOf( '"', i2 );
long nid = Long.parseLong( l2.substring( i2, ri2 ) );
w.nodes.add( nid );
}
else if ( (i2 = l2.indexOf( "<tag k=\"" )) >= 0 )
{ // property-tag
i2 += 8;
int ri2 = l2.indexOf( '"', i2 );
String key = l2.substring( i2, ri2 );
i2 = l2.indexOf( " v=\"", ri2 );
if ( i2 >= 0 )
{
i2 += 4;
int ri3 = l2.indexOf( '"', i2 );
String value = l2.substring( i2, ri3 );
w.putTag( key, value );
}
}
else if ( l2.indexOf( "</way>" ) >= 0 )
{ // end-tag
break;
}
}
wListener.nextWay( w );
return true;
}
private boolean checkChangeset( String line ) throws Exception
{
int idx0 = line.indexOf( "<changeset id=\"" );
if ( idx0 < 0 ) return false;
if ( !line.endsWith( "/>" ) )
{
int loopcheck = 0;
for(;;)
{
String l2 = _br.readLine();
if ( l2.indexOf("</changeset>") >= 0 || ++loopcheck > 10000 ) break;
}
}
return true;
}
private boolean checkRelation( String line ) throws Exception
{
int idx0 = line.indexOf( "<relation id=\"" );
if ( idx0 < 0 ) return false;
idx0 += 14;
int idx1 = line.indexOf( '"', idx0 );
long rid = Long.parseLong( line.substring( idx0, idx1 ) );
LongList wayIds = new LongList( 16 );
List<String> roles = new ArrayList<String>(16);
RelationData r = new RelationData( rid, wayIds, roles );
// read the members and tags
for(;;)
{
String l2 = _br.readLine();
if ( l2 == null ) return false;
int i2;
if ( (i2 = l2.indexOf( "<member type=\"way\" ref=\"" )) >= 0 ) // <member type="relation" ref="452156" role="backward"/>
{ // way member reference
i2 += 24;
int ri2 = l2.indexOf( '"', i2 );
long wid = Long.parseLong( l2.substring( i2, ri2 ) );
int role1 = ri2 + 8;
int role2 = l2.indexOf( '"', role1 );
String role = l2.substring( role1, role2 );
r.ways.add( wid );
r.roles.add( role );
}
else if ( (i2 = l2.indexOf( "<tag k=\"" )) >= 0 )
{ // property-tag
i2 += 8;
int ri2 = l2.indexOf( '"', i2 );
String key = l2.substring( i2, ri2 );
i2 = l2.indexOf( " v=\"", ri2 );
if ( i2 >= 0 )
{
i2 += 4;
int ri3 = l2.indexOf( '"', i2 );
String value = l2.substring( i2, ri3 );
r.putTag( key, value );
}
}
else if ( l2.indexOf( "</relation>" ) >= 0 )
{ // end-tag
break;
}
}
rListener.nextRelation( r );
return true;
}
}

View file

@ -0,0 +1,109 @@
/**
* This program
* - reads an *.osm input file (or stdin)
* - writes zoom 0 tiles
*
* @author ab
*/
package btools.mapsplitter;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import btools.util.DiffCoderDataOutputStream;
public class OsmSplitter extends MapCreatorBase
{
private long recordCnt;
private long nodesParsed;
private long waysParsed;
private long relsParsed;
private long changesetsParsed;
private DataOutputStream wayDos;
private DataOutputStream relDos;
private DiffCoderDataOutputStream nodeDos;
public static void main(String[] args) throws Exception
{
System.out.println("*** OsmSplitter : transform an osm map to zoom 0 tiles");
if (args.length != 2)
{
System.out.println("usage : java OsmSplitter <tile-dir> <inputfile> ");
return;
}
new OsmSplitter().process(
new File( args[0] )
, new File( args[1] )
);
}
public void process (File outTileDir, File mapFile ) throws Exception
{
if ( !outTileDir.isDirectory() ) throw new RuntimeException( "out tile directory " + outTileDir + " does not exist" );
File z0 = new File( outTileDir, "0" );
z0.mkdirs();
File ways = new File( z0, "0_0.wtl" );
File nodes = new File( z0, "0_0.ntl" );
File rels = new File( z0, "0_0.rtl" );
wayDos = new DataOutputStream( new BufferedOutputStream( new FileOutputStream( ways ) ) );
relDos = new DataOutputStream( new BufferedOutputStream( new FileOutputStream( rels ) ) );
nodeDos = new DiffCoderDataOutputStream( new BufferedOutputStream( new FileOutputStream( nodes ) ) );
// parse the osm map and stream the records to the zoom-0 tile files
long t0 = System.currentTimeMillis();
new OsmParser2().readMap( mapFile, this, this, this );
long t1 = System.currentTimeMillis();
System.out.println( "parsing time (ms) =" + (t1-t0) );
// close all files
wayDos.close();
relDos.close();
nodeDos.close();
System.out.println( statsLine() );
}
private void checkStats()
{
if ( (++recordCnt % 100000) == 0 ) System.out.println( statsLine() );
}
private String statsLine()
{
return "records read: " + recordCnt + " nodes=" + nodesParsed + " ways=" + waysParsed + " rels=" + relsParsed + " changesets=" + changesetsParsed;
}
@Override
public void nextNode( NodeData n ) throws Exception
{
nodesParsed++;
checkStats();
n.writeTo( nodeDos );
}
@Override
public void nextWay( WayData w ) throws Exception
{
waysParsed++;
checkStats();
w.writeTo( wayDos );
}
@Override
public void nextRelation( RelationData r ) throws Exception
{
relsParsed++;
checkStats();
r.writeTo( relDos );
}
}

View file

@ -0,0 +1,74 @@
package btools.mapsplitter;
import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import btools.util.LongList;
/**
* Container for relation data on the preprocessor level
*
* @author ab
*/
public class RelationData extends MapCreatorBase
{
public long rid;
public LongList ways;
public List<String> roles;
public RelationData( long id, LongList ways, List<String> roles )
{
rid = id;
this.ways = ways;
this.roles = roles;
}
public RelationData( DataInputStream di ) throws Exception
{
ways = new LongList( 16 );
roles = new ArrayList<String>();
rid = readId( di) ;
for (;;)
{
String key = di.readUTF();
if ( key.length() == 0 ) break;
String value = di.readUTF();
putTag( key, value );
}
for (;;)
{
long wid = readId( di );
if ( wid == -1 ) break;
ways.add( wid );
roles.add( di.readUTF() );
}
}
public void writeTo( java.io.DataOutputStream dos ) throws Exception
{
writeId( dos, rid );
if ( getTagsOrNull() != null )
{
for( Map.Entry<String,String> me : getTagsOrNull().entrySet() )
{
if ( me.getKey().length() > 0 )
{
dos.writeUTF( me.getKey() );
dos.writeUTF( me.getValue() );
}
}
}
dos.writeUTF( "" );
int size = ways.size();
for( int i=0; i < size; i++ )
{
writeId( dos, ways.get( i ) );
dos.writeUTF( roles.get(i) );
}
writeId( dos, -1 ); // stopbyte
}
}

View file

@ -0,0 +1,44 @@
package btools.mapsplitter;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
/**
* Iterate over a relation file
*
* @author ab
*/
public class RelationIterator extends MapCreatorBase
{
private RelationListener listener;
public RelationIterator( RelationListener relationListener )
{
listener = relationListener;
}
public void processFile(File relationfile) throws Exception
{
System.out.println( "*** RelationIterator reading: " + relationfile );
listener.relationFileStart( relationfile );
DataInputStream di = new DataInputStream( new BufferedInputStream ( new FileInputStream( relationfile ) ) );
try
{
for(;;)
{
RelationData r = new RelationData( di );
listener.nextRelation( r );
}
}
catch( EOFException eof )
{
di.close();
}
listener.relationFileEnd( relationfile );
}
}

View file

@ -0,0 +1,18 @@
package btools.mapsplitter;
import java.io.File;
/**
* Callback listener for Relations
*
* @author ab
*/
public interface RelationListener
{
void relationFileStart( File relfile ) throws Exception;
void nextRelation( RelationData data ) throws Exception;
void relationFileEnd( File relfile ) throws Exception;
}

View file

@ -0,0 +1,194 @@
package btools.mapsplitter;
import java.util.Comparator;
import java.util.HashMap;
import java.util.PriorityQueue;
/**
* Encoder for a set of tags
*
* It detects identical sets and sorts them
* into a huffman-tree according to their frequencies
*
* Adapted for 3-pass encoding (counters -> statistics -> encoding )
* but doesn't do anything in pass 1
*/
public final class TagSetEncoder
{
private HashMap<TagSet, TagSet> identityMap;
private BitWriteBuffer bwb;
private int pass;
private TagSet freq1;
public void encodeTagSet( int[] data )
{
if ( pass == 1 )
{
return;
}
TagSet tvsProbe = new TagSet();
tvsProbe.data = data;
TagSet tvs = identityMap.get( tvsProbe );
if ( pass == 3 )
{
if ( tvs.frequency == 1 )
{
bwb.encodeBounded( freq1.range - 1, freq1.code );
encodeTagSequence( bwb, data );
}
else
{
bwb.encodeBounded( tvs.range - 1, tvs.code );
}
}
else if ( pass == 2 )
{
if ( tvs == null )
{
tvs = tvsProbe;
identityMap.put( tvs, tvs );
}
tvs.frequency++;
}
}
public void encodeDictionary( BitWriteBuffer bwb )
{
if ( ++pass == 3 )
{
freq1 = new TagSet();
PriorityQueue<TagSet> queue = new PriorityQueue<TagSet>(2*identityMap.size(), new TagSet.FrequencyComparator());
for( TagSet ts : identityMap.values() )
{
if ( ts.frequency > 1 )
{
queue.add( ts );
}
else
{
freq1.frequency++;
}
}
queue.add( freq1 );
while (queue.size() > 1)
{
TagSet node = new TagSet();
node.child1 = queue.poll();
node.child2 = queue.poll();
node.frequency = node.child1.frequency + node.child2.frequency;
queue.add( node );
}
TagSet root = queue.poll();
root.encode( bwb, 1, 0 );
}
this.bwb = bwb;
}
public TagSetEncoder()
{
identityMap = new HashMap<TagSet, TagSet>();
}
private static void encodeTagSequence( BitWriteBuffer bwb, int[] data )
{
int tagcount = data.length;
bwb.encodeInt( tagcount );
int lastIdx = -1;
for( int i=0; i<tagcount; i++ )
{
int idx = data[i];
bwb.encodeInt( idx - lastIdx -1 );
lastIdx = idx;
}
}
public static final class TagSet
{
public int[] data;
public int frequency;
public int code;
public int range;
public TagSet child1;
public TagSet child2;
public void encode( BitWriteBuffer bwb, int range, int code )
{
this.range = range;
this.code = code;
boolean isNode = child1 != null;
bwb.encodeBit( isNode );
if ( isNode )
{
child1.encode( bwb, range << 1, code );
child2.encode( bwb, range << 1, code + range );
}
else
{
bwb.encodeBit( data == null );
if ( data != null )
{
encodeTagSequence( bwb, data );
}
}
}
@Override
public boolean equals( Object o )
{
if ( o instanceof TagSet )
{
TagSet tvs = (TagSet) o;
if ( data == null )
{
return tvs.data == null;
}
if ( tvs.data == null )
{
return data == null;
}
if ( data.length != tvs.data.length )
{
return false;
}
for ( int i = 0; i < data.length; i++ )
{
if ( data[i] != tvs.data[i] )
{
return false;
}
}
return true;
}
return false;
}
@Override
public int hashCode()
{
if ( data == null )
{
return 0;
}
int h = 17;
for ( int i = 0; i < data.length; i++ )
{
h = ( h << 8 ) + data[i];
}
return h;
}
public static class FrequencyComparator implements Comparator<TagSet>
{
@Override
public int compare(TagSet tvs1, TagSet tvs2) {
if ( tvs1.frequency < tvs2.frequency )
return -1;
if ( tvs1.frequency > tvs2.frequency )
return 1;
return 0;
}
}
}
}
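The encoder is driven in three passes separated by encodeDictionary calls: pass 1 is a no-op, pass 2 counts identical tag sets, pass 3 writes the huffman dictionary and then the per-object codes. A hypothetical driver sketch (tag indexes and buffer size invented; only the last pass' buffer is the real output):
TagSetEncoder tse = new TagSetEncoder();
BitWriteBuffer bwb = null;
for( int pass = 1; pass <= 3; pass++ )
{
bwb = new BitWriteBuffer( new byte[256] );
tse.encodeDictionary( bwb ); // advances the encoder's internal pass counter
tse.encodeTagSet( new int[] { 0, 3 } ); // ascending tag indexes, hypothetical
tse.encodeTagSet( new int[] { 0, 3 } ); // seen twice -> gets its own huffman code
tse.encodeTagSet( new int[] { 7 } ); // seen once -> coded inline behind the freq1 code
}
int encodedBytes = bwb.getEncodedLength();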

View file

@ -0,0 +1,303 @@
package btools.mapsplitter;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.PriorityQueue;
/**
* Encoder for the tag-value statistics
*
* @author ab
*/
public class TagValueEncoder
{
HashMap<String,Tag> tags = new HashMap<String,Tag>();
ArrayList<TagGroup> groups = new ArrayList<TagGroup>();
ByteArrayOutputStream baos ;
DataOutputStream dos;
ArrayList<String> stringList;
HashMap<String,Integer> stringMap;
ArrayList<Tag> taglist;
private int setId = 0;
private int nextIdx = 0;
private int pass;
private static String[][] taggroups = new String[][] {
{ "highway", "name", "maxspeed", "lanes", "service", "tracktype", "surface" }
, { "access", "foot", "bicycle", "motorcar", "motor_vehicle", "motorcycle", "vehicle" }
, { "building", "addr:street", "addr:housenumber", "addr:city", "addr:postcode", "addr:housename" }
};
private void encodeString( BitWriteBuffer bc, String s )
{
Integer ii = stringMap.get( s );
bc.encodeBit( ii == null );
if ( ii == null )
{
try
{
byte[] textBytes = s.getBytes( "UTF8" );
bc.encodeInt( textBytes.length );
dos.write( textBytes );
}
catch( Exception e )
{
throw new RuntimeException( e );
}
ii = Integer.valueOf( stringList.size() );
stringList.add( s );
stringMap.put( s, ii );
return;
}
bc.encodeBounded( stringList.size()-1, ii.intValue() );
}
private class TagGroup implements Comparable<TagGroup>
{
String[] names;
int count;
int lastSetId = 0;
void incCount()
{
if ( setId != lastSetId ) count++;
lastSetId = setId;
}
TagGroup( String[] names )
{
this.names = names;
for( String name : names )
{
tags.put( name, new Tag( name, this ) );
}
groups.add( this );
}
void indexTags()
{
for( String name : names )
{
Tag t = tags.get( name );
if ( t.count > 0 ) t.idx = nextIdx++;
}
}
@Override
public int compareTo( TagGroup g )
{
return g.count - count;
}
}
public TagValueEncoder()
{
for( String[] names : taggroups )
{
new TagGroup( names );
}
}
public class Tag implements Comparable<Tag>
{
Tag( String name, TagGroup group )
{
this.name = name;
this.group = group;
}
String name;
int count;
int idx;
private Object tree;
HashMap<String,Value> values = new HashMap<String,Value>();
List<Value> valueList;
TagGroup group;
void addValue( String value )
{
Value v = values.get( value );
if ( v == null )
{
v = new Value( value );
values.put( value, v );
}
v.frequency++;
count++;
}
public void encodeDictionary( BitWriteBuffer bc ) throws IOException
{
encodeString( bc, name );
PriorityQueue<Value> queue = new PriorityQueue<Value>( values.size() );
queue.addAll( values.values() );
while (queue.size() > 1)
{
queue.add( new Value( queue.poll(), queue.poll() ) );
}
queue.poll().encodeTree( bc, 1, 0 );
}
@Override
public int compareTo( Tag t )
{
return idx - t.idx;
}
}
private class Value implements Comparable<Value>
{
Value( String value )
{
this.value = value;
}
Value( Value c1, Value c2 )
{
child1 = c1;
child2 = c2;
frequency = c1.frequency + c2.frequency;
}
String value;
int code;
int range;
Value child1;
Value child2;
int frequency;
void encodeTree( BitWriteBuffer bc, int range, int code ) throws IOException
{
this.range = range;
this.code = code;
boolean isNode = child1 != null;
bc.encodeBit( isNode );
if ( isNode )
{
child1.encodeTree( bc, range << 1, code );
child2.encodeTree( bc, range << 1, code + range );
return;
}
bc.encodeBit( false ); // no inline item here
encodeString( bc, value );
}
void encode( BitWriteBuffer bc )
{
bc.encodeBounded( range - 1, code );
}
@Override
public int compareTo( Value v )
{
return frequency - v.frequency;
}
}
public byte[] encodeDictionary( BitWriteBuffer bc ) throws IOException
{
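// Three-pass protocol: pass 1 only counts tags and values, pass 2 sorts
// the tag groups and assigns dictionary indexes, pass 3 writes the
// dictionary and returns the collected UTF-8 text bytes.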
if ( ++pass == 1 )
{
return null;
}
else if ( pass == 2 )
{
nextIdx = 0;
Collections.sort( groups );
for( TagGroup g : groups )
{
g.indexTags();
}
taglist = new ArrayList<Tag>();
for( Tag t : tags.values() )
{
if ( t.count > 0 )
{
taglist.add( t );
}
}
Collections.sort( taglist );
return null;
}
stringList = new ArrayList<String>();
stringMap = new HashMap<String,Integer>();
baos = new ByteArrayOutputStream();
dos = new DataOutputStream( baos );
bc.encodeInt( taglist.size() );
for( Tag t : taglist )
{
t.encodeDictionary( bc );
}
dos.close();
byte[] textData = baos.toByteArray();
dos = null;
baos = null;
return textData;
}
public int getTagIndex( String name )
{
return tags.get( name ).idx;
}
public List<String> sortTagNames( Collection<String> col )
{
ArrayList<Tag> taglist = new ArrayList<Tag>( col.size() );
for( String name : col )
{
taglist.add( tags.get( name ) );
}
Collections.sort( taglist );
ArrayList<String> res = new ArrayList<String>( taglist.size() );
for( Tag t : taglist )
{
res.add( t.name );
}
return res;
}
public void startTagSet()
{
if ( pass == 1 )
{
setId++;
}
}
public void encodeValue( BitWriteBuffer bc, String name, String value )
{
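// Pass 1 only collects frequencies (unknown tags get a single-tag group on
// the fly); in later passes the value is emitted as its Huffman code.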
if ( pass == 1 )
{
Tag t = tags.get( name );
if ( t == null )
{
String[] names = new String[1];
names[0] = name;
new TagGroup( names );
t = tags.get( name );
}
t.addValue( value );
}
else // pass 2+3
{
tags.get( name ).values.get( value ).encode( bc );
}
}
}

View file

@@ -0,0 +1,21 @@
package btools.mapsplitter;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import btools.util.DiffCoderDataInputStream;
import btools.util.DiffCoderDataOutputStream;
/**
* Container for a tile during encoding
*/
public class TileData extends MapCreatorBase
{
public int zoom;
public int x;
public int y;
public List<NodeData> nodeList;
public TileData parent;
}

View file

@@ -0,0 +1,555 @@
package btools.mapsplitter;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.Deflater;
/**
* TileEncoder encodes a given node/way file pair
*
* @author ab
*/
public class TileEncoder extends MapCreatorBase
{
private Map<NodeData,NodeData> nodeMap;
private Map<WayData,WayData> wayMap;
private List<NodeData> used = new ArrayList<NodeData>();
private NodeData templateNode = new NodeData( 0, 0, 0 );
private WayData templateWay = new WayData( 0, null );
private TileData tile;
private BitWriteBuffer bwb;
private byte[] buffer;
private List<WayData> wayList;
private List<RelationData> relationList;
// statistics only
private int nTagSets;
private int nTaggedNodes;
private long totalNodes;
private long totalTaggedNodes;
private long totalWays;
private long totalTextBytes;
private long totalTiles;
private int pass;
private boolean dostats;
private TagValueEncoder tagValueEncoder;
private TagSetEncoder tagSetEncoder;
public static void main( String[] args ) throws Exception
{
System.out.println( "*** TileEncoder: encodes a given node/way file pair" );
if ( args.length != 1 )
{
System.out.println( "usage: java TileEncoder <node-file>" );
return;
}
new TileEncoder().process( new File( args[0] ) );
}
public void process( File nodeFile) throws Exception
{
TileData t0 = new TileData(); // zoom 0 dummy
process( nodeFile, t0 );
System.out.println( "**** total statistics ****" );
System.out.println( "tiles=" + totalTiles + " nodes=" + totalNodes + " taggedNodes=" + totalTaggedNodes + " ways=" + totalWays + " textBytes= " + totalTextBytes );
System.out.println( bwb.getBitReport() );
}
public void process( File nodeFile, TileData tile ) throws Exception
{
this.tile = tile;
if ( !nodeFile.exists() )
{
return;
}
System.out.println( "******* processing: " + nodeFile );
new NodeIterator( this ).processFile( nodeFile );
// process child tiles
int zoomStep = 2;
int xyStep = 1 << zoomStep;
int nextZoom = tile.zoom + zoomStep;
int x0 = tile.x << zoomStep;
int y0 = tile.y << zoomStep;
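// recurse into the 4x4 child tiles two zoom levels down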
File childDir = new File( nodeFile.getParentFile().getParentFile(), "" + nextZoom );
for( int dx = 0; dx < xyStep; dx++ )
{
for( int dy = 0; dy < xyStep; dy++ )
{
TileData nextTile = new TileData();
nextTile.zoom = nextZoom;
nextTile.x = x0 + dx;
nextTile.y = y0 + dy;
nextTile.parent = tile;
File nextFile = new File( childDir, nextTile.x + "_" + nextTile.y + ".ntl" );
process( nextFile, nextTile );
}
}
}
@Override
public void nodeFileStart( File nodeFile ) throws Exception
{
tile.nodeList = new ArrayList<NodeData>();
nodeMap = new HashMap<NodeData,NodeData>();
wayMap = new HashMap<WayData,WayData>();
}
@Override
public void nextNode( NodeData n ) throws Exception
{
// if no level yet, it's ours
if ( n.zoom == -1 || n.zoom == tile.zoom )
{
n.zoom = tile.zoom;
n.used = true;
tile.nodeList.add( n );
}
n.localeIndex = nodeMap.size();
nodeMap.put( n,n );
n.calcGeoId();
}
@Override
public void nodeFileEnd( File nodeFile ) throws Exception
{
NodeData.sortByGeoId( tile.nodeList );
int idx = 0;
for( NodeData n : tile.nodeList )
{
n.nativeIndex = idx++;
}
// read corresponding way-file into wayList
wayList = new ArrayList<WayData>();
String name = nodeFile.getName();
String wayfilename = name.substring( 0, name.length()-3 ) + "wtl";
File wayfile = new File( nodeFile.getParent(), wayfilename );
if ( wayfile.exists() )
{
new WayIterator( this ).processFile( wayfile );
}
// read corresponding relation-file
relationList = new ArrayList<RelationData>();
String relfilename = name.substring( 0, name.length()-3 ) + "rtl";
File relfile = new File( nodeFile.getParent(), relfilename );
if ( relfile.exists() )
{
new RelationIterator( this ).processFile( relfile );
}
int nnodes = tile.nodeList.size();
tagValueEncoder = new TagValueEncoder();
tagSetEncoder = new TagSetEncoder();
long[] nodePositions = new long[nnodes];
for( int i=0; i<nnodes; i++ )
{
nodePositions[i] = tile.nodeList.get(i).gid;
}
for( pass=1;pass<=3; pass++) // 3 passes: counters, stat-collection, encoding
{
nTagSets = 0;
dostats = pass == 3;
buffer = new byte[10000000];
bwb = new BitWriteBuffer( buffer );
tagSetEncoder.encodeDictionary( bwb );
if ( dostats ) bwb.assignBits( "tagset-dictionary" );
// encode the tag-value dictionary
byte[] textData = tagValueEncoder.encodeDictionary( bwb );
if ( dostats ) bwb.assignBits( "value-dictionary" );
// encode the node positions
bwb.encodeSortedArray( nodePositions );
if ( dostats ) bwb.assignBits( "node-positions" );
if ( pass == 3 )
{
writeDownzoomRefs( bwb );
}
// encode the tagged nodes
writeTaggedNodes();
writeWays( bwb );
writeRelations( bwb );
if ( pass == 1 && nTagSets == 0 ) // stop it if nothing tagged
{
break;
}
if ( pass == 1 )
{
assignLocalIndexes();
}
if ( pass == 3 )
{
// Compress the text-bytes
Deflater compresser = new Deflater();
compresser.setInput(textData);
compresser.finish();
byte[] textHeader = new byte[textData.length + 1024];
int textHeaderLen = compresser.deflate(textHeader);
totalTiles++;
totalNodes += tile.nodeList.size();
totalTaggedNodes += nTaggedNodes;
totalTextBytes += textHeaderLen;
System.out.println( "nodes=" + tile.nodeList.size() + " taggedNodes=" + nTaggedNodes + " ways=" + wayList.size() + " textBytes= " + textHeaderLen );
// write result to file
String datafilename = name.substring( 0, name.length()-3 ) + "osb";
File datafile = new File( nodeFile.getParent(), datafilename );
DataOutputStream dos = new DataOutputStream( new FileOutputStream( datafile ) );
dos.writeInt( textData.length );
dos.writeInt( textHeaderLen );
dos.write( textHeader, 0, textHeaderLen );
int size = bwb.getEncodedLength();
dos.writeInt( size );
dos.write( buffer, 0, size );
dos.close();
}
}
if ( relfile.exists() )
{
relfile.delete();
}
if ( wayfile.exists() )
{
wayfile.delete();
}
if ( nodeFile.exists() )
{
nodeFile.delete();
}
}
@Override
public void nextWay( WayData way ) throws Exception
{
// if no level yet, it's ours
if ( way.zoom == -1 || way.zoom == tile.zoom )
{
way.zoom = tile.zoom;
way.startNodeIdx = -1;
wayList.add( way );
}
wayMap.put( way,way );
}
@Override
public void nextRelation( RelationData r ) throws Exception
{
relationList.add( r );
}
private void assignLocalIndexes()
{
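// After pass 1 all nodes referenced by this tile's ways (plus its own
// nodes) are flagged as used; sort them by geo-id and assign the compact
// locale indexes the later passes encode against.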
used = new ArrayList<NodeData>();
for( NodeData n : nodeMap.values() )
{
if ( n.used )
{
used.add( n );
}
}
NodeData.sortByGeoId( used );
int idx = 0;
for( NodeData n : used )
{
n.localeIndex = idx++;
}
}
private void writeDownzoomRefs( BitWriteBuffer bwb )
{
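// For every lower zoom level, pair the locale index of each used node owned
// by that level with its native index there, so the decoder can resolve
// references to nodes already encoded in a parent tile.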
// total locale nodes
bwb.encodeInt( used.size() );
for( int zoom=0; zoom<tile.zoom; zoom++ )
{
// count
int cnt = 0;
for( NodeData n : used )
{
if ( n.zoom == zoom )
{
cnt++;
}
}
long[] localeIndexes = new long[cnt];
long[] nativeIndexes = new long[cnt];
int idx = 0;
for( NodeData n : used )
{
if ( n.zoom == zoom )
{
// System.out.println( " ---> locale=" + n.localeIndex + " native=" + n.nativeIndex );
localeIndexes[idx] = n.localeIndex;
nativeIndexes[idx] = n.nativeIndex;
idx++;
}
}
bwb.encodeSortedArray( localeIndexes );
if ( dostats ) bwb.assignBits( "localindexes" );
bwb.encodeSortedArray( nativeIndexes );
if ( dostats ) bwb.assignBits( "nativeindexes" );
}
}
private int getLocaleIndexForNid( long nid )
{
templateNode.nid = nid;
NodeData n = nodeMap.get( templateNode );
if ( n == null ) throw new IllegalArgumentException( "nid=" + nid + " not found" );
n.used = true;
return n.localeIndex;
}
private void encodeWay( BitWriteBuffer bwb, WayData way ) throws Exception
{
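// A way is written as its tag set, a closed-polygon flag and the node count
// (minus 2); the first node's locale index goes into the sorted start-index
// array (see writeWays), every further node as a signed index delta to its
// predecessor.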
int nnodes = way.nodes.size();
boolean closedPoly = way.nodes.get(0) == way.nodes.get(nnodes-1);
if ( closedPoly )
{
nnodes--;
}
if ( nnodes < 2 )
{
return;
}
writeTags( way.getTagsOrNull() );
bwb.encodeBit( closedPoly );
bwb.encodeInt( nnodes-2 );
if ( dostats ) bwb.assignBits( "way-node-count" );
// determine the tile-index for each node
int lastIdx = 0;
for (int i=0; i<nnodes; i++ )
{
long nid = way.nodes.get(i);
int idx = getLocaleIndexForNid( nid );
if ( i == 0 )
{
way.startNodeIdx = idx;
}
else
{
int delta = idx-lastIdx;
if ( delta == 0 )
{
System.out.println( "double node in way, ignoring" );
way.startNodeIdx = -1;
return;
}
boolean negative = delta < 0;
bwb.encodeBit( negative );
bwb.encodeLong( (negative ? -delta : delta) -1 );
if ( dostats ) bwb.assignBits( "way-node-idx-delta" );
}
lastIdx = idx;
}
}
private void writeWays( BitWriteBuffer bwb ) throws Exception
{
// in pass 3, sort ways according startNodeIdx and encode start-indexes
if ( pass == 3 )
{
if ( wayList.size() > 0 )
{
ArrayList<WayData> goodWays = new ArrayList<WayData>();
for( WayData w : wayList )
{
if ( w.startNodeIdx >= 0 )
{
goodWays.add( w );
}
}
WayData.sortByStartNode( goodWays );
wayList = goodWays;
}
// encode start-node-indexes
int waycount = wayList.size();
long[] startIndexes = new long[waycount];
int i = 0;
for( WayData w : wayList )
{
w.nativeIndex = i;
startIndexes[i++] = w.startNodeIdx;
}
bwb.encodeSortedArray( startIndexes );
if ( dostats ) bwb.assignBits( "way-start-idx" );
}
for( WayData way : wayList )
{
encodeWay( bwb, way );
}
}
private void writeRelations( BitWriteBuffer bwb ) throws Exception
{
bwb.encodeInt( relationList.size() );
if ( dostats ) bwb.assignBits( "relation-count" );
for( RelationData rel : relationList )
{
encodeRelation( bwb, rel );
}
}
private void encodeRelation( BitWriteBuffer bwb, RelationData rel ) throws Exception
{
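// Relation members are reduced to ways known in the tile tree; each
// surviving member is written as a zoom delta, the way's native index at
// that zoom and its role (via the value encoder).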
writeTags( rel.getTagsOrNull() );
int size = rel.ways.size();
if ( dostats ) bwb.assignBits( "way-node-count" );
// count valid members
int validMembers = 0;
for( int i=0; i < size; i++ )
{
long wid = rel.ways.get( i );
String role = rel.roles.get(i);
templateWay.wid = wid;
WayData w = wayMap.get( templateWay );
if ( w == null ) continue;
validMembers++;
}
bwb.encodeInt( validMembers );
for( int i=0; i < size; i++ )
{
long wid = rel.ways.get( i );
String role = rel.roles.get(i);
templateWay.wid = wid;
WayData w = wayMap.get( templateWay );
if ( w == null ) continue;
int zoomDelta = tile.zoom - w.zoom;
bwb.encodeInt( zoomDelta );
bwb.encodeInt( w.nativeIndex );
tagValueEncoder.encodeValue( bwb, "role", role );
}
}
private void writeTaggedNodes() throws Exception
{
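// Tagged nodes are addressed by their position in the geo-sorted node list:
// first the sorted index array, then one tag set per tagged node.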
// count tagged nodes
int cnt = 0;
for( int idx=0; idx<tile.nodeList.size(); idx++ )
{
NodeData n = tile.nodeList.get( idx );
if ( n.zoom == tile.zoom && n.getTagsOrNull() != null )
{
cnt++;
}
}
// build index array
long[] taggedIndexes = new long[cnt];
int i = 0;
for( int idx=0; idx<tile.nodeList.size(); idx++ )
{
if ( tile.nodeList.get( idx ).getTagsOrNull() != null )
{
taggedIndexes[i++] = idx;
}
}
nTaggedNodes = cnt;
bwb.encodeSortedArray( taggedIndexes );
if ( dostats ) bwb.assignBits( "tagged-node-idx" );
for( int idx=0; idx<tile.nodeList.size(); idx++ )
{
NodeData n = tile.nodeList.get( idx );
if ( n.getTagsOrNull() != null )
{
writeTags( n.getTagsOrNull() );
}
}
}
private void writeTags( HashMap<String, String> tags ) throws Exception
{
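// Pass 1 only registers tag/value statistics; later passes translate the
// sorted tag names into dictionary indexes, emit the tag set and then one
// Huffman-coded value per tag.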
List<String> names;
if ( tags == null )
{
tags = new HashMap<String, String>();
}
if ( pass > 1 )
{
// create tagset as sorted int-array
names = tagValueEncoder.sortTagNames( tags.keySet() );
int ntags = names.size();
int[] tagset = new int[ ntags ];
for( int i=0; i<ntags; i++ )
{
tagset[i] = tagValueEncoder.getTagIndex( names.get(i) );
}
// ... and encode it
tagSetEncoder.encodeTagSet( tagset );
if ( dostats ) bwb.assignBits( "tag-set" );
}
else
{
nTagSets++;
names = new ArrayList<String>( tags.keySet() ); // unsorted is o.k. in pass 1
}
// then encode the values
tagValueEncoder.startTagSet();
for( String name : names )
{
String value = tags.get( name );
tagValueEncoder.encodeValue( bwb, name, value );
if ( dostats ) bwb.assignBits( "value-index" );
}
}
}

View file

@@ -0,0 +1,492 @@
package btools.mapsplitter;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import btools.util.DenseLongMap;
import btools.util.TinyDenseLongMap;
/**
* TileSplitter splits a tile into pieces
*/
public class TileSplitter extends MapCreatorBase
{
private NodeData templateNode = new NodeData( 0, 0, 0 );
private DenseLongMap nodeIndexMap;
private DenseLongMap bigWayMemberMap;
private DenseLongMap wayIndexMap;
private DenseLongMap bigRelMemberMap;
private DenseLongMap relIndexMap;
private Map<NodeData,NodeData> nodeMap;
private List<NodeData> thisLevelNodes;
private Map<Long,Integer> thisLevelNodesIndexes;
private List<WayData> thisLevelWays;
private Map<Long,Integer> thisLevelWaysIndexes;
private int level;
private int baseLon;
private int baseLat;
private int nodeCount = 0;
private String typeSuffix;
private boolean inPassLoop;
private int pass; // 1 == build index maps, 2 == collect big-way members, 3 == collect this-level nodes/ways, 4 == write output
private File inTileDir;
public static void main(String[] args) throws Exception
{
System.out.println("*** TileSplitter: cut tiles into 16 pieces");
if (args.length != 1)
{
System.out.println("usage: java TileSplitter <tile-dir>" );
return;
}
new TileSplitter().process( new File( args[0] ) );
}
public void process( File tileDir) throws Exception
{
for( int level = 0; level < 12; level += 2 )
{
process( tileDir, level );
}
}
public void process( File tileDir, int level ) throws Exception
{
System.out.println("processing level: " + level );
inTileDir = new File( tileDir, "" + (level) );
outTileDir = new File( tileDir, "" + (level+2) );
outTileDir.mkdirs();
this.level = level;
// *** initialize 4-pass processing of nodes, ways and relations
inPassLoop = false;
new NodeIterator( this ).processDir( inTileDir, ".ntl" );
}
@Override
public void nodeFileStart( File nodeFile ) throws Exception
{
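// The outer iteration acts as pass 4: on its first callback the same file
// is re-read three times (passes 1-3) before the outer pass continues.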
if ( !inPassLoop )
{
inPassLoop = true;
pass = 1;
new NodeIterator( this ).processFile( nodeFile );
pass = 2;
new NodeIterator( this ).processFile( nodeFile );
pass = 3;
new NodeIterator( this ).processFile( nodeFile );
pass = 4;
inPassLoop = false;
}
System.out.println( "nodeFileStart pass=" + pass );
if ( pass == 1 )
{
getBaseTileFromName( nodeFile.getName() );
nodeIndexMap = Boolean.getBoolean( "useDenseMaps" ) ? new DenseLongMap() : new TinyDenseLongMap();
}
else if ( pass == 2 )
{
}
else if ( pass == 3 )
{
nodeMap = new HashMap<NodeData,NodeData>();
thisLevelNodes = new ArrayList<NodeData>();
}
else // pass == 4
{
NodeData.sortByGeoId( thisLevelNodes );
thisLevelNodesIndexes = new HashMap<Long,Integer>();
int idx = 0;
for( NodeData n : thisLevelNodes )
{
thisLevelNodesIndexes.put( Long.valueOf( n.nid ), Integer.valueOf( idx++ ) );
}
thisLevelNodes = null;
}
typeSuffix = "ntl";
}
private void getBaseTileFromName( String name )
{
System.out.println( "getBaseTileFromName: " + name );
int idx1 = name.indexOf( '_' );
int idx2 = name.indexOf( '.' );
baseLon = Integer.parseInt( name.substring( 0, idx1 ) );
baseLat = Integer.parseInt( name.substring( idx1+1, idx2 ) );
}
@Override
public void nextNode( NodeData n ) throws Exception
{
int tidx = getTileIndex( n );
if ( pass == 1 )
{
nodeCount++;
nodeIndexMap.put( n.nid, tidx );
}
else if ( pass == 2 )
{
}
else
{
boolean usedHere = bigWayMemberMap.getInt( n.nid ) == 0;
if ( usedHere ) // if used on this level...
{
// if no level yet, this is it
if ( n.zoom == -1 )
{
n.zoom = level;
}
}
if ( pass == 3 )
{
if ( n.zoom != -1 )
{
n.calcGeoId();
nodeMap.put( n,n );
if ( n.zoom == level )
{
thisLevelNodes.add( n );
}
}
}
else // pass == 4
{
// add the index
if ( n.zoom == level )
{
n.nativeIndex = thisLevelNodesIndexes.get( Long.valueOf( n.nid ) );
}
if ( usedHere )
{
n.writeTo( getOutStreamForTile( 16 ) );
}
n.writeTo( getOutStreamForTile( tidx ) ); // write to subtile
}
}
}
@Override
public void nodeFileEnd( File nodeFile ) throws Exception
{
System.out.println( "nodeFileEnd pass=" + pass );
closeTileOutStreams();
File parentNodes = new File( outTileDir, getNameForTile( 16 ) );
// read corresponding way-file
if ( pass == 2 )
{
bigWayMemberMap = Boolean.getBoolean( "useDenseMaps" ) ? new DenseLongMap() : new TinyDenseLongMap();
}
String name = nodeFile.getName();
String wayfilename = name.substring( 0, name.length()-3 ) + "wtl";
File wayfile = new File( inTileDir, wayfilename );
if ( wayfile.exists() )
{
new WayIterator( this ).processFile( wayfile );
}
// read corresponding relation-file
if ( pass == 1 )
{
bigRelMemberMap = Boolean.getBoolean( "useDenseMaps" ) ? new DenseLongMap() : new TinyDenseLongMap();
}
String relfilename = name.substring( 0, name.length()-3 ) + "rtl";
File relfile = new File( inTileDir, relfilename );
if ( relfile.exists() )
{
new RelationIterator( this ).processFile( relfile );
}
if ( pass == 4 )
{
nodeFile.delete();
if ( parentNodes.exists() )
{
parentNodes.renameTo( nodeFile );
}
else if ( nodeCount > 0 )
{
nodeFile.createNewFile(); // create even if empty, to signal the existence of child tiles
}
}
}
@Override
public void wayFileStart( File wayFile ) throws Exception
{
System.out.println( "wayFileStart pass=" + pass );
if ( pass == 1 )
{
wayIndexMap = Boolean.getBoolean( "useDenseMaps" ) ? new DenseLongMap() : new TinyDenseLongMap();
}
else if ( pass == 3 )
{
thisLevelWays = new ArrayList<WayData>();
}
else if ( pass == 4 )
{
WayData.sortByStartNode( thisLevelWays );
thisLevelWaysIndexes = new HashMap<Long,Integer>();
int idx = 0;
for( WayData w : thisLevelWays )
{
thisLevelWaysIndexes.put( Long.valueOf( w.wid ), Integer.valueOf( idx++ ) );
}
thisLevelWays = null;
}
typeSuffix = "wtl";
}
@Override
public void nextWay( WayData w ) throws Exception
{
int widx = getTileIndex( w );
if ( widx == -1 )
{
System.out.println( "************ invalid way: " + w.wid );
return;
}
if ( pass == 1 )
{
wayIndexMap.put( w.wid, widx );
}
else // pass >= 2
{
boolean usedHere = bigRelMemberMap.getInt( w.wid ) == 0;
if ( usedHere || widx == 16 )
{
// if no level yet, this is it
if ( w.zoom == -1 )
{
w.zoom = level;
}
if ( pass == 2 )
{
int nnodes = w.nodes.size();
for (int i=0; i<nnodes; i++ )
{
bigWayMemberMap.put( w.nodes.get(i), 0 );
}
}
}
if ( pass == 3 )
{
if ( w.zoom == level )
{
w.startNodeIdx = getLocaleIndexForNid( w.nodes.get(0) );
thisLevelWays.add( w );
}
}
if ( pass == 4 )
{
if ( w.zoom == level )
{
w.nativeIndex = thisLevelWaysIndexes.get( Long.valueOf( w.wid ) );
}
if ( usedHere && widx != 16 )
{
w.writeTo( getOutStreamForTile( 16 ) );
}
w.writeTo( getOutStreamForTile( widx ) );
}
}
}
@Override
public void wayFileEnd( File wayFile ) throws Exception
{
System.out.println( "wayFileEnd pass=" + pass );
closeTileOutStreams();
if ( pass == 4 )
{
wayFile.delete();
File parentWays = new File( outTileDir, getNameForTile( 16 ) );
if ( parentWays.exists() )
{
parentWays.renameTo( wayFile );
}
}
}
@Override
public void relationFileStart( File relFile ) throws Exception
{
System.out.println( "relFileStart pass=" + pass );
if ( pass == 1 )
{
relIndexMap = Boolean.getBoolean( "useDenseMaps" ) ? new DenseLongMap() : new TinyDenseLongMap();
}
else if ( pass == 2 )
{
}
else // pass 3 and 4: nothing to do
{
}
typeSuffix = "rtl";
}
@Override
public void nextRelation( RelationData r ) throws Exception
{
int ridx = getTileIndex( r );
if ( ridx == -1 )
{
System.out.println( "************ invalid relation: " + r.rid );
return;
}
if ( pass == 1 )
{
relIndexMap.put( r.rid, ridx );
}
if ( pass == 1 && ridx == 16 )
{
int nways = r.ways.size();
for (int i=0; i<nways; i++ )
{
bigRelMemberMap.put( r.ways.get(i), 0 );
}
}
if ( pass == 4 )
{
r.writeTo( getOutStreamForTile( ridx ) );
}
}
@Override
public void relationFileEnd( File relFile ) throws Exception
{
System.out.println( "relFileEnd pass=" + pass );
closeTileOutStreams();
if ( pass == 4 )
{
relFile.delete();
File parentRels = new File( outTileDir, getNameForTile( 16 ) );
if ( parentRels.exists() )
{
parentRels.renameTo( relFile );
}
}
}
private int getLocaleIndexForNid( long nid )
{
templateNode.nid = nid;
NodeData n = nodeMap.get( templateNode );
if ( n == null ) throw new IllegalArgumentException( "nid=" + nid + " not found" );
n.used = true;
return n.localeIndex;
}
private int getTileIndex( NodeData n )
{
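// pick the 2-bit lon/lat offsets of the child tile two zoom levels below
// the current level; the result is a sub-tile index in the range 0..15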
int idxLon = ( n.ilon >> ( 26 - level ) ) & 3;
int idxLat = ( n.ilat >> ( 26 - level ) ) & 3;
return 4 * idxLon + idxLat;
}
private int getTileIndex( WayData w )
{
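// A way moves to a child tile only if all of its nodes map to the same
// sub-tile; otherwise index 16 keeps it on the current level, and -1 flags
// a node unknown to this tile.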
int nnodes = w.nodes.size();
int wayTileIndex = 16;
// determine the tile-index for each node
for (int i=0; i<nnodes; i++ )
{
int tileIndex = nodeIndexMap.getInt( w.nodes.get(i) );
if ( tileIndex == -1 )
{
return -1;
}
if ( wayTileIndex == 16 )
{
wayTileIndex = tileIndex;
}
else if ( tileIndex != wayTileIndex )
{
return 16;
}
}
return wayTileIndex;
}
private int getTileIndex( RelationData r )
{
int nways = r.ways.size();
int relTileIndex = 16;
boolean hasAny = false;
// determine the tile-index for each way
for (int i=0; i<nways; i++ )
{
int tileIndex = wayIndexMap.getInt( r.ways.get(i) );
if ( tileIndex == -1 )
{
continue;
}
hasAny = true;
if ( relTileIndex == 16 )
{
relTileIndex = tileIndex;
}
else if ( tileIndex != relTileIndex )
{
return 16;
}
}
return hasAny ? relTileIndex : -1;
}
protected String getNameForTile( int tileIndex )
{
if ( tileIndex == 16 )
{
return "parent." + typeSuffix;
}
int idxLon = baseLon * 4 + (tileIndex >> 2);
int idxLat = baseLat * 4 + (tileIndex & 3);
return idxLon + "_" + idxLat + "." + typeSuffix;
}
}

View file

@@ -0,0 +1,159 @@
package btools.mapsplitter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import btools.util.LongList;
/**
* Container for waydata on the preprocessor level
*
* @author ab
*/
public class WayData extends MapCreatorBase
{
public long wid;
public LongList nodes;
public int startNodeIdx;
private int minx;
private int miny;
private int maxx;
private int maxy;
public int zoom = -1; // the zoom level this way is on
public int nativeIndex; // the index along all NATIVE ways of its tile
public void calcBBox( List<NodeData> nodeList )
{
int nn = nodes.size();
for( int i=0; i<nn; i++ )
{
NodeData n = nodeList.get((int)nodes.get(i));
if ( i == 0 )
{
minx = maxx = n.ilon;
miny = maxy = n.ilat;
}
else
{
if ( n.ilon < minx ) minx = n.ilon;
if ( n.ilon > maxx ) maxx = n.ilon;
if ( n.ilat < miny ) miny = n.ilat;
if ( n.ilat > maxy ) maxy = n.ilat;
}
}
}
public boolean inBBox( int z, int x, int y )
{
int shift = 28-z;
int x0 = x << shift;
int x1 = (x+1) << shift;
int y0 = y << shift;
int y1 = (y+1) << shift;
boolean outofbox = x1 < minx || x0 >= maxx || y1 < miny || y0 >= maxy;
return !outofbox;
}
public WayData( long id )
{
wid = id;
nodes = new LongList( 16 );
}
public WayData( long id, LongList nodes )
{
wid = id;
this.nodes = nodes;
}
public WayData( DataInputStream di ) throws Exception
{
zoom = di.readInt();
nativeIndex = di.readInt();
nodes = new LongList( 16 );
wid = readId( di) ;
for (;;)
{
String key = di.readUTF();
if ( key.length() == 0 ) break;
String value = di.readUTF();
putTag( key, value );
}
for (;;)
{
long nid = readId( di );
if ( nid == -1 ) break;
nodes.add( nid );
}
}
public void writeTo( DataOutputStream dos ) throws Exception
{
dos.writeInt( zoom );
dos.writeInt( nativeIndex );
writeId( dos, wid );
if ( getTagsOrNull() != null )
{
for( Map.Entry<String,String> me : getTagsOrNull().entrySet() )
{
if ( me.getKey().length() > 0 )
{
dos.writeUTF( me.getKey() );
dos.writeUTF( me.getValue() );
}
}
}
dos.writeUTF( "" );
int size = nodes.size();
for( int i=0; i < size; i++ )
{
writeId( dos, nodes.get( i ) );
}
writeId( dos, -1 ); // stopbyte
}
public static void sortByStartNode( List<WayData> ways )
{
Collections.sort( ways, new Comparator<WayData>()
{
@Override
public int compare(WayData w1, WayData w2)
{
long d = w1.startNodeIdx - w2.startNodeIdx;
// for equal start indexes sort by wid
if ( d == 0L )
{
d = w1.wid - w2.wid;
}
return d == 0 ? 0 : ( d < 0 ? -1 : 1 );
}
} );
}
@Override
public boolean equals( Object o )
{
if ( o instanceof WayData )
{
WayData w = (WayData) o;
return w.wid == wid;
}
return false;
}
@Override
public int hashCode()
{
return (int)((wid >> 32) ^ wid);
}
}

View file

@@ -0,0 +1,45 @@
package btools.mapsplitter;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
/**
* Iterate over a single wayfile or a directory
* of waytiles and feed the ways to the callback listener
*
* @author ab
*/
public class WayIterator extends MapCreatorBase
{
private WayListener listener;
public WayIterator( WayListener wayListener )
{
listener = wayListener;
}
public void processFile(File wayfile) throws Exception
{
System.out.println( "*** WayIterator reading: " + wayfile );
listener.wayFileStart( wayfile );
DataInputStream di = new DataInputStream( new BufferedInputStream ( new FileInputStream( wayfile ) ) );
try
{
for(;;)
{
WayData w = new WayData( di );
listener.nextWay( w );
}
}
catch( EOFException eof )
{
di.close();
}
listener.wayFileEnd( wayfile );
}
}

View file

@@ -0,0 +1,17 @@
package btools.mapsplitter;
import java.io.File;
/**
* Callback listener for WayIterator
*
* @author ab
*/
public interface WayListener
{
void wayFileStart( File wayfile ) throws Exception;
void nextWay( WayData data ) throws Exception;
void wayFileEnd( File wayfile ) throws Exception;
}

View file

@@ -0,0 +1,141 @@
package btools.mapsplitter;
import java.util.Arrays;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
import btools.mapdecoder.BitReadBuffer;
import btools.mapdecoder.CharDecoder;
public class BitCodingTest
{
// @Test
public void charEncodeDecodeTest()
{
byte[] ab = new byte[4000];
BitWriteBuffer bwb = new BitWriteBuffer( ab );
CharEncoder ce = new CharEncoder();
for( int pass=1; pass<=3; pass++ )
{
ce.encodeDictionary( bwb );
for ( char c = 'a'; c <= 'z'; c++ )
{
ce.encode( Character.valueOf( c ) );
}
}
BitReadBuffer brb = new BitReadBuffer( ab );
CharDecoder cd = new CharDecoder( brb );
for ( char c = 'a'; c <= 'z'; c++ )
{
Character c1 = cd.decode();
Assert.assertTrue( "char mismatch c=" + c + "c1=" + c1, c == c1.charValue() );
}
}
@Test
public void varBitsEncodeDecodeTest()
{
byte[] ab = new byte[4000];
BitWriteBuffer bwb = new BitWriteBuffer( ab );
for ( int i = 0; i < 1000; i++ )
{
bwb.encodeInt( i );
bwb.encodeLong( i );
}
BitReadBuffer brb = new BitReadBuffer( ab );
for ( int i = 0; i < 1000; i++ )
{
int value = brb.decodeInt();
Assert.assertTrue( "int value mismatch i=" + i + " v=" + value, value == i );
long lvalue = brb.decodeLong();
Assert.assertTrue( "long value mismatch i=" + i + " v=" + lvalue, lvalue == i );
}
}
@Test
public void boundedEncodeDecodeTest()
{
byte[] ab = new byte[581969];
BitWriteBuffer bwb = new BitWriteBuffer( ab );
for ( int max = 1; max < 1000; max++ )
{
for ( int val = 0; val <= max; val++ )
{
bwb.encodeBounded( max, val );
}
}
BitReadBuffer brb = new BitReadBuffer( ab );
for ( int max = 1; max < 1000; max++ )
{
for ( int val = 0; val <= max; val++ )
{
long valDecoded = brb.decodeBounded( max );
if ( valDecoded != val )
{
Assert.fail( "mismatch at max=" + max + " " + valDecoded + "<>" + val );
}
}
}
}
@Test
public void sortedLongArrayEncodeDecodeTest()
{
Random rand = new Random(1234);
int size = 20;
long[] values = new long[size];
for ( int i = 0; i < size; i++ )
{
values[i] = rand.nextInt() & 0x0fffffff;
}
values[5] = 175384; // force collision
values[8] = 175384;
values[15] = 275384; // force neighbours
values[18] = 275385;
encodeDecodeArray( "Test1", values );
values = new long[1];
values[0] = 0x134567890123456L;
encodeDecodeArray( "Test2", values );
values = new long[0];
encodeDecodeArray( "Test3", values );
values = new long[100000];
for ( int i = 0; i < values.length; i++ )
{
values[i] = (((long)rand.nextInt())&0xffffffffL) << rand.nextInt(26); // 32 + 25 bits
}
encodeDecodeArray( "Test4", values );
}
private void encodeDecodeArray( String testName, long[] values )
{
Arrays.sort( values );
byte[] ab = new byte[3000000];
BitWriteBuffer bwb = new BitWriteBuffer( ab );
bwb.encodeSortedArray( values );
long[] decodedValues = new BitReadBuffer( ab ).decodeSortedArray();
for ( int i = 0; i < values.length; i++ )
{
if ( values[i] != decodedValues[i] )
{
Assert.fail( "mismatch at " + testName + " i=" + i + " " + values[i] + "<>" + decodedValues[i] );
}
}
}
}

View file

@@ -0,0 +1,37 @@
package btools.mapsplitter;
import java.util.Random;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import java.net.URL;
import java.io.File;
import btools.mapdecoder.TileDecoder;
import btools.mapdecoder.OsmTile;
public class MapsplitterTest
{
@Test
public void mapsplitterTest() throws Exception
{
URL mapurl = this.getClass().getResource( "/dreieich.osm.gz" );
Assert.assertTrue( "test-osm-map dreieich.osm not found", mapurl != null );
File mapfile = new File(mapurl.getFile());
File workingDir = mapfile.getParentFile();
File tmpdir = new File( workingDir, "tmp2" );
tmpdir.mkdir();
// run OsmSplitter
File tiles = new File( tmpdir, "tiles" );
tiles.mkdir();
new OsmSplitter().process( tiles, mapfile );
// run TileSplitter to split up to level 12
new TileSplitter().process( tiles );
new TileEncoder().process( new File( tiles, "0/0_0.ntl" ) );
new TileDecoder().process( tiles, null, 12, 2147, 1389 );
}
}

View file

@@ -0,0 +1,275 @@
package btools.mapsplitter;
import com.google.protobuf.InvalidProtocolBufferException;
import org.openstreetmap.osmosis.osmbinary.Fileformat;
import org.openstreetmap.osmosis.osmbinary.Osmformat;
import btools.util.LongList;
import java.io.IOException;
import java.util.*;
import java.util.logging.Level;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
import btools.mapcreator.BPbfFieldDecoder;
/**
* Converts PBF block data into decoded entities ready to be passed into an Osmosis pipeline. This
* class is designed to be passed into a pool of worker threads to allow multi-threaded decoding.
* <p/>
* @author Brett Henderson
*/
public class BPbfBlobDecoder2
{
private String blobType;
private byte[] rawBlob;
private OsmParser2 parser;
/**
* Creates a new instance.
* <p/>
* @param blobType The type of blob.
* @param rawBlob The raw data of the blob.
* @param listener The listener for receiving decoding results.
*/
public BPbfBlobDecoder2( String blobType, byte[] rawBlob, OsmParser2 parser )
{
this.blobType = blobType;
this.rawBlob = rawBlob;
this.parser = parser;
}
public void process() throws Exception
{
if ("OSMHeader".equals(blobType))
{
processOsmHeader(readBlobContent());
} else if ("OSMData".equals(blobType))
{
processOsmPrimitives(readBlobContent());
} else
{
System.out.println("Skipping unrecognised blob type " + blobType);
}
}
private byte[] readBlobContent() throws IOException
{
Fileformat.Blob blob = Fileformat.Blob.parseFrom(rawBlob);
byte[] blobData;
if (blob.hasRaw())
{
blobData = blob.getRaw().toByteArray();
} else if (blob.hasZlibData())
{
Inflater inflater = new Inflater();
inflater.setInput(blob.getZlibData().toByteArray());
blobData = new byte[blob.getRawSize()];
try
{
inflater.inflate(blobData);
} catch (DataFormatException e)
{
throw new RuntimeException("Unable to decompress PBF blob.", e);
}
if (!inflater.finished())
{
throw new RuntimeException("PBF blob contains incomplete compressed data.");
}
} else
{
throw new RuntimeException("PBF blob uses unsupported compression, only raw or zlib may be used.");
}
return blobData;
}
private void processOsmHeader( byte[] data ) throws InvalidProtocolBufferException
{
Osmformat.HeaderBlock header = Osmformat.HeaderBlock.parseFrom(data);
// Build the list of active and unsupported features in the file.
List<String> supportedFeatures = Arrays.asList("OsmSchema-V0.6", "DenseNodes");
List<String> activeFeatures = new ArrayList<String>();
List<String> unsupportedFeatures = new ArrayList<String>();
for (String feature : header.getRequiredFeaturesList())
{
if (supportedFeatures.contains(feature))
{
activeFeatures.add(feature);
} else
{
unsupportedFeatures.add(feature);
}
}
// We can't continue if there are any unsupported features. We wait
// until now so that we can display all unsupported features instead of
// just the first one we encounter.
if (unsupportedFeatures.size() > 0)
{
throw new RuntimeException("PBF file contains unsupported features " + unsupportedFeatures);
}
}
private Map<String, String> buildTags( List<Integer> keys, List<Integer> values, BPbfFieldDecoder fieldDecoder )
{
Iterator<Integer> keyIterator = keys.iterator();
Iterator<Integer> valueIterator = values.iterator();
if (keyIterator.hasNext())
{
Map<String, String> tags = new HashMap<String, String>();
while (keyIterator.hasNext())
{
String key = fieldDecoder.decodeString(keyIterator.next());
String value = fieldDecoder.decodeString(valueIterator.next());
tags.put(key, value);
}
return tags;
}
return null;
}
private void processNodes( List<Osmformat.Node> nodes, BPbfFieldDecoder fieldDecoder )
{
for (Osmformat.Node node : nodes)
{
Map<String, String> tags = buildTags(node.getKeysList(), node.getValsList(), fieldDecoder);
parser.addNode( node.getId(), tags, fieldDecoder.decodeLatitude( node.getLat() ), fieldDecoder.decodeLongitude( node.getLon() ) );
}
}
private void processNodes( Osmformat.DenseNodes nodes, BPbfFieldDecoder fieldDecoder )
{
List<Long> idList = nodes.getIdList();
List<Long> latList = nodes.getLatList();
List<Long> lonList = nodes.getLonList();
Iterator<Integer> keysValuesIterator = nodes.getKeysValsList().iterator();
long nodeId = 0;
long latitude = 0;
long longitude = 0;
for (int i = 0; i < idList.size(); i++)
{
// Delta decode node fields.
nodeId += idList.get(i);
latitude += latList.get(i);
longitude += lonList.get(i);
// Build the tags. The key and value string indexes are sequential
// in the same PBF array. Each set of tags is delimited by an index
// with a value of 0.
Map<String, String> tags = null;
while (keysValuesIterator.hasNext())
{
int keyIndex = keysValuesIterator.next();
if (keyIndex == 0)
{
break;
}
int valueIndex = keysValuesIterator.next();
if (tags == null)
{
tags = new HashMap<String, String>();
}
tags.put(fieldDecoder.decodeString(keyIndex), fieldDecoder.decodeString(valueIndex));
}
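// Raw PBF coordinates are in units of granularity nanodegrees; dividing by
// 1e7 assumes the default granularity of 100 and a zero lat/lon offset.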
parser.addNode( nodeId, tags, ((double) latitude) / 10000000, ((double) longitude) / 10000000);
}
}
private void processWays( List<Osmformat.Way> ways, BPbfFieldDecoder fieldDecoder )
{
for (Osmformat.Way way : ways)
{
Map<String, String> tags = buildTags(way.getKeysList(), way.getValsList(), fieldDecoder);
// Build up the list of way nodes for the way. The node ids are
// delta encoded meaning that each id is stored as a delta against
// the previous one.
long nodeId = 0;
LongList wayNodes = new LongList( 16 );
for (long nodeIdOffset : way.getRefsList())
{
nodeId += nodeIdOffset;
wayNodes.add(nodeId);
}
parser.addWay( way.getId(), tags, wayNodes );
}
}
private ArrayList<String> roles;
private LongList buildRelationMembers(
List<Long> memberIds, List<Integer> memberRoles, List<Osmformat.Relation.MemberType> memberTypes,
BPbfFieldDecoder fieldDecoder )
{
LongList wayIds = new LongList( 16 );
roles = new ArrayList<String>(16);
Iterator<Long> memberIdIterator = memberIds.iterator();
Iterator<Integer> memberRoleIterator = memberRoles.iterator();
Iterator<Osmformat.Relation.MemberType> memberTypeIterator = memberTypes.iterator();
// Build up the list of relation members for the way. The member ids are
// delta encoded meaning that each id is stored as a delta against
// the previous one.
long refId = 0;
while (memberIdIterator.hasNext())
{
Osmformat.Relation.MemberType memberType = memberTypeIterator.next();
refId += memberIdIterator.next();
String role = fieldDecoder.decodeString( memberRoleIterator.next() );
if ( memberType == Osmformat.Relation.MemberType.WAY ) // currently just waymembers
{
wayIds.add( refId );
roles.add( role );
}
}
return wayIds;
}
private void processRelations( List<Osmformat.Relation> relations, BPbfFieldDecoder fieldDecoder )
{
for (Osmformat.Relation relation : relations)
{
Map<String, String> tags = buildTags(relation.getKeysList(), relation.getValsList(), fieldDecoder);
LongList wayIds = buildRelationMembers( relation.getMemidsList(), relation.getRolesSidList(),
relation.getTypesList(), fieldDecoder);
parser.addRelation( relation.getId(), tags, wayIds, roles );
}
}
private void processOsmPrimitives( byte[] data ) throws InvalidProtocolBufferException
{
Osmformat.PrimitiveBlock block = Osmformat.PrimitiveBlock.parseFrom(data);
BPbfFieldDecoder fieldDecoder = new BPbfFieldDecoder(block);
for (Osmformat.PrimitiveGroup primitiveGroup : block.getPrimitivegroupList())
{
processNodes(primitiveGroup.getDense(), fieldDecoder);
processNodes(primitiveGroup.getNodesList(), fieldDecoder);
processWays(primitiveGroup.getWaysList(), fieldDecoder);
processRelations(primitiveGroup.getRelationsList(), fieldDecoder);
}
}
}

View file

@@ -0,0 +1,111 @@
package btools.mapsplitter;
import java.io.*;
import java.util.*;
import java.util.zip.*;
import btools.util.*;
import org.openstreetmap.osmosis.osmbinary.Fileformat;
/**
* Parser for OSM data
*
* @author ab
*/
public class OsmParser2 extends MapCreatorBase
{
private BufferedReader _br;
private NodeListener nListener;
private WayListener wListener;
private RelationListener rListener;
public void readMap( File mapFile,
NodeListener nListener,
WayListener wListener,
RelationListener rListener ) throws Exception
{
this.nListener = nListener;
this.wListener = wListener;
this.rListener = rListener;
System.out.println( "*** PBF Parsing (2): " + mapFile );
// once more for testing
int rawBlobCount = 0;
DataInputStream dis = new DataInputStream( new BufferedInputStream ( new FileInputStream( mapFile ) ) );
for(;;)
{
int headerLength;
try
{
headerLength = dis.readInt();
}
catch (EOFException e)
{
break;
}
byte[] headerBuffer = new byte[headerLength];
dis.readFully(headerBuffer);
Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.parseFrom(headerBuffer);
byte[] blobData = new byte[blobHeader.getDatasize()];
dis.readFully(blobData);
new BPbfBlobDecoder2( blobHeader.getType(), blobData, this ).process();
rawBlobCount++;
}
dis.close();
System.out.println( "read raw blobs: " + rawBlobCount );
}
public void addNode( long nid, Map<String, String> tags, double lat, double lon )
{
NodeData n = new NodeData( nid, lon, lat );
n.setTags( (HashMap<String,String>)tags );
try
{
nListener.nextNode( n );
}
catch( Exception e )
{
throw new RuntimeException( "error writing node: " + e );
}
}
public void addWay( long wid, Map<String, String> tags, LongList nodes )
{
WayData w = new WayData( wid, nodes );
w.setTags( (HashMap<String,String>)tags );
try
{
wListener.nextWay( w );
}
catch( Exception e )
{
throw new RuntimeException( "error writing way: " + e );
}
}
public void addRelation( long rid, Map<String, String> tags, LongList wayIds, List<String> roles )
{
RelationData r = new RelationData( rid, wayIds, roles );
r.setTags( (HashMap<String,String>)tags );
try
{
rListener.nextRelation( r );
}
catch( Exception e )
{
throw new RuntimeException( "error writing relation: " + e );
}
}
}