Index: trunk/src/org/openstreetmap/josm/actions/ExtensionFileFilter.java
===================================================================
--- trunk/src/org/openstreetmap/josm/actions/ExtensionFileFilter.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/actions/ExtensionFileFilter.java	(revision 18695)
@@ -28,4 +28,5 @@
 import org.openstreetmap.josm.gui.io.importexport.OsmChangeImporter;
 import org.openstreetmap.josm.gui.io.importexport.OsmImporter;
+import org.openstreetmap.josm.gui.io.importexport.OsmPbfImporter;
 import org.openstreetmap.josm.gui.io.importexport.OziWptImporter;
 import org.openstreetmap.josm.gui.io.importexport.RtkLibImporter;
@@ -66,4 +67,5 @@
                 OsmImporter.class,
                 OsmChangeImporter.class,
+                OsmPbfImporter.class,
                 GeoJSONImporter.class,
                 GpxImporter.class,
Index: trunk/src/org/openstreetmap/josm/data/Bounds.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/Bounds.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/Bounds.java	(revision 18695)
@@ -7,5 +7,5 @@
 import java.text.DecimalFormat;
 import java.text.MessageFormat;
-import java.util.Objects;
+import java.util.Arrays;
 
 import org.openstreetmap.josm.data.coor.ILatLon;
@@ -593,5 +593,5 @@
     @Override
     public int hashCode() {
-        return Objects.hash(minLat, minLon, maxLat, maxLon);
+        return Arrays.hashCode(new double[] {minLat, minLon, maxLat, maxLon});
     }
 
Index: trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/CommandInteger.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/CommandInteger.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/CommandInteger.java	(revision 18695)
@@ -51,4 +51,13 @@
 
     /**
+     * Add a parameter
+     * @param parameterInteger The parameter to add (converted to {@code short}).
+     * @since xxx
+     */
+    public void addParameter(long parameterInteger) {
+        this.parameters[added++] = (short) parameterInteger;
+    }
+
+    /**
      * Get the operations for the command
      * @return The operations
Index: trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/Feature.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/Feature.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/imagery/vectortile/mapbox/Feature.java	(revision 18695)
@@ -28,5 +28,5 @@
     /**
      * The number format instance to use (using a static instance gets rid of quite o few allocations)
-     * Doing this reduced the allocations of {@link #parseTagValue(String, Layer, Number, List)} from 22.79% of parent to
+     * Doing this reduced the allocations of {@link #parseTagValue(String, Layer, int, List)} from 22.79% of parent to
      * 12.2% of parent.
      */
@@ -75,5 +75,5 @@
                     if (next.getField() == TAG_FIELD) {
                         // This is packed in v1 and v2
-                        ProtobufPacked packed = new ProtobufPacked(byteArrayOutputStream, next.getBytes());
+                        ProtobufPacked packed = new ProtobufPacked(next.getBytes());
                         if (tagList == null) {
                             tagList = new ArrayList<>(packed.getArray().length);
@@ -81,17 +81,17 @@
                             tagList.ensureCapacity(tagList.size() + packed.getArray().length);
                         }
-                        for (Number number : packed.getArray()) {
-                            key = parseTagValue(key, layer, number, tagList);
+                        for (long number : packed.getArray()) {
+                            key = parseTagValue(key, layer, (int) number, tagList);
                         }
                     } else if (next.getField() == GEOMETRY_FIELD) {
                         // This is packed in v1 and v2
-                        ProtobufPacked packed = new ProtobufPacked(byteArrayOutputStream, next.getBytes());
+                        ProtobufPacked packed = new ProtobufPacked(next.getBytes());
                         CommandInteger currentCommand = null;
-                        for (Number number : packed.getArray()) {
+                        for (long number : packed.getArray()) {
                             if (currentCommand != null && currentCommand.hasAllExpectedParameters()) {
                                 currentCommand = null;
                             }
                             if (currentCommand == null) {
-                                currentCommand = new CommandInteger(number.intValue());
+                                currentCommand = new CommandInteger(Math.toIntExact(number));
                                 this.geometry.add(currentCommand);
                             } else {
@@ -128,10 +128,10 @@
      * @return The new key (if {@code null}, then a value was parsed and added to tags)
      */
-    private static String parseTagValue(String key, Layer layer, Number number, List<String> tagList) {
+    private static String parseTagValue(String key, Layer layer, int number, List<String> tagList) {
         if (key == null) {
-            key = layer.getKey(number.intValue());
+            key = layer.getKey(number);
         } else {
             tagList.add(key);
-            Object value = layer.getValue(number.intValue());
+            Object value = layer.getValue(number);
             if (value instanceof Double || value instanceof Float) {
                 // reset grouping if the instance is a singleton
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/Blob.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/Blob.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/Blob.java	(revision 18695)
@@ -0,0 +1,102 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.data.osm.pbf;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.zip.InflaterInputStream;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream;
+import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream;
+
+/**
+ * A "Blob" of data from an OSM PBF file. It, in turn, contains additional data in PBF format, which may be compressed.
+ * @since xxx
+ */
+public final class Blob {
+    public enum CompressionType {
+        /** No compression */
+        raw,
+        /** zlib compression */
+        zlib,
+        /** lzma compression (optional) */
+        lzma,
+        /** bzip2 compression (deprecated in 2010, so if we ever support saving PBF files, <i>don't use this compression type</i>) */
+        bzip2,
+        /** lz4 compression (optional) */
+        lz4,
+        /** zstd compression (optional) */
+        zstd
+    }
+
+    private final Integer rawSize;
+    private final CompressionType compressionType;
+    private final byte[] bytes;
+    public Blob(@Nullable Integer rawSize, @Nonnull CompressionType compressionType, @Nonnull byte... bytes) {
+        this.rawSize = rawSize;
+        this.compressionType = compressionType;
+        this.bytes = bytes;
+    }
+
+    /**
+     * The raw size of the blob (after decompression)
+     * @return The raw size
+     */
+    @Nullable
+    public Integer rawSize() {
+        return this.rawSize;
+    }
+
+    /**
+     * The compression type of the blob
+     * @return The compression type
+     */
+    @Nonnull
+    public CompressionType compressionType() {
+        return this.compressionType;
+    }
+
+    /**
+     * The bytes that make up the blob data
+     * @return The bytes
+     */
+    @Nonnull
+    public byte[] bytes() {
+        return this.bytes;
+    }
+
+    /**
+     * Get the decompressed inputstream for this blob
+     * @return The decompressed inputstream
+     * @throws IOException if we don't support the compression type <i>or</i> the decompressor has issues, see
+     * <ul>
+     *     <li>{@link LZMACompressorInputStream}</li>
+     *     <li>{@link ZstdCompressorInputStream}</li>
+     *     <li>{@link BZip2CompressorInputStream}</li>
+     * </ul>
+     */
+    @Nonnull
+    public InputStream inputStream() throws IOException {
+        final ByteArrayInputStream bais = new ByteArrayInputStream(this.bytes);
+        switch (this.compressionType) {
+            case raw:
+                return bais;
+            case lzma:
+                return new LZMACompressorInputStream(bais);
+            case zstd:
+                return new ZstdCompressorInputStream(bais);
+            case bzip2:
+                return new BZip2CompressorInputStream(bais);
+            case lz4:
+                throw new IOException("lz4 pbf is not currently supported");
+            case zlib:
+                return new InflaterInputStream(bais);
+            default:
+                throw new IOException("unknown compression type is not currently supported: " + this.compressionType.name());
+        }
+    }
+}
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/BlobHeader.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/BlobHeader.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/BlobHeader.java	(revision 18695)
@@ -0,0 +1,33 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.data.osm.pbf;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+/**
+ * A "BlobHeader" which contains metadata for a {@link Blob}.
+ * @since xxx
+ */
+public final class BlobHeader {
+    private final String type;
+    private final byte[] indexData;
+    private final int dataSize;
+
+    public BlobHeader(@Nonnull String type, @Nullable byte[] indexData, int dataSize) {
+        this.type = type;
+        this.indexData = indexData;
+        this.dataSize = dataSize;
+    }
+
+    public String type() {
+        return this.type;
+    }
+
+    public byte[] indexData() {
+        return this.indexData;
+    }
+
+    public int dataSize() {
+        return this.dataSize;
+    }
+}
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/HeaderBlock.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/HeaderBlock.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/HeaderBlock.java	(revision 18695)
@@ -0,0 +1,119 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.data.osm.pbf;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import org.openstreetmap.josm.data.osm.BBox;
+
+/**
+ * The header block contains data on required features, optional features, the bbox of the data, the source, the osmosis replication timestamp,
+ * the osmosis replication sequence number, and the osmosis replication base url
+ * @since xxx
+ */
+public final class HeaderBlock {
+    private final BBox bbox;
+    private final String[] requiredFeatures;
+    private final String[] optionalFeatures;
+    private final String writingProgram;
+    private final String source;
+    private final Long osmosisReplicationTimestamp;
+    private final Long osmosisReplicationSequenceNumber;
+    private final String osmosisReplicationBaseUrl;
+
+    /**
+     * Create a new {@link HeaderBlock} for an OSM PBF file
+     * @param bbox The bbox
+     * @param requiredFeatures The required features
+     * @param optionalFeatures The optional features
+     * @param writingProgram The program used to write the file
+     * @param source The source
+     * @param osmosisReplicationTimestamp The last time that osmosis updated the source (in seconds since epoch)
+     * @param osmosisReplicationSequenceNumber The replication sequence number
+     * @param osmosisReplicationBaseUrl The replication base url
+     */
+    public HeaderBlock(@Nullable BBox bbox, @Nonnull String[] requiredFeatures, @Nonnull String[] optionalFeatures,
+                       @Nullable String writingProgram, @Nullable String source, @Nullable Long osmosisReplicationTimestamp,
+                       @Nullable Long osmosisReplicationSequenceNumber, @Nullable String osmosisReplicationBaseUrl) {
+        this.bbox = bbox;
+        this.requiredFeatures = requiredFeatures;
+        this.optionalFeatures = optionalFeatures;
+        this.writingProgram = writingProgram;
+        this.source = source;
+        this.osmosisReplicationTimestamp = osmosisReplicationTimestamp;
+        this.osmosisReplicationSequenceNumber = osmosisReplicationSequenceNumber;
+        this.osmosisReplicationBaseUrl = osmosisReplicationBaseUrl;
+    }
+
+    /**
+     * The required features to parse the PBF
+     * @return The required features
+     */
+    @Nonnull
+    public String[] requiredFeatures() {
+        return this.requiredFeatures.clone();
+    }
+
+    /**
+     * The optional features to parse the PBF
+     * @return The optional features
+     */
+    @Nonnull
+    public String[] optionalFeatures() {
+        return this.optionalFeatures.clone();
+    }
+
+    /**
+     * Get the program used to write the PBF
+     * @return The program that wrote the PBF
+     */
+    @Nullable
+    public String writingProgram() {
+        return this.writingProgram;
+    }
+
+    /**
+     * The source
+     * @return The source (same as bbox field from OSM)
+     */
+    @Nullable
+    public String source() {
+        return this.source;
+    }
+
+    /**
+     * The replication timestamp
+     * @return The time that the file was last updated
+     */
+    @Nullable
+    public Long osmosisReplicationTimestamp() {
+        return this.osmosisReplicationTimestamp;
+    }
+
+    /**
+     * The replication sequence number
+     * @return The sequence number
+     */
+    @Nullable
+    public Long osmosisReplicationSequenceNumber() {
+        return this.osmosisReplicationSequenceNumber;
+    }
+
+    /**
+     * The replication base URL
+     * @return the base url for replication, if we ever want/need to continue the replication
+     */
+    @Nullable
+    public String osmosisReplicationBaseUrl() {
+        return this.osmosisReplicationBaseUrl;
+    }
+
+    /**
+     * The bbox
+     * @return The bbox
+     */
+    @Nullable
+    public BBox bbox() {
+        return this.bbox;
+    }
+}
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/Info.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/Info.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/Info.java	(revision 18695)
@@ -0,0 +1,51 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.data.osm.pbf;
+
+import javax.annotation.Nullable;
+
+/**
+ * Optional metadata for primitives
+ * @since xxx
+ */
+public final class Info {
+    private final int version;
+    private final Long timestamp;
+    private final Long changeset;
+    private final Integer uid;
+    private final Integer userSid;
+    private final boolean visible;
+
+    public Info(int version, @Nullable Long timestamp, @Nullable Long changeset, @Nullable Integer uid, @Nullable Integer userSid,
+                boolean visible) {
+        this.version = version;
+        this.timestamp = timestamp;
+        this.changeset = changeset;
+        this.uid = uid;
+        this.userSid = userSid;
+        this.visible = visible;
+    }
+
+    public int version() {
+        return this.version;
+    }
+
+    public Long timestamp() {
+        return this.timestamp;
+    }
+
+    public Long changeset() {
+        return this.changeset;
+    }
+
+    public Integer uid() {
+        return this.uid;
+    }
+
+    public Integer userSid() {
+        return this.userSid;
+    }
+
+    public boolean isVisible() {
+        return this.visible;
+    }
+}
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/OsmPbfFile.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/OsmPbfFile.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/OsmPbfFile.java	(revision 18695)
@@ -0,0 +1,27 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.data.osm.pbf;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * A class used to determine whether or not a file may be an OSM PBF file
+ * @since xxx
+ */
+public final class OsmPbfFile {
+    /**
+     * Extensions for OSM PBF files.
+     * {@code "osm.pbf"} is a SHOULD, <i>not</i> a MUST.
+     */
+    public static final List<String> EXTENSION = Collections.unmodifiableList(Arrays.asList("osm.pbf", "pbf"));
+
+    /**
+     * mimetypes for OSM PBF files
+     */
+    public static final List<String> MIMETYPE = Collections.emptyList();
+
+    private OsmPbfFile() {
+        // Hide the constructor
+    }
+}
Index: trunk/src/org/openstreetmap/josm/data/osm/pbf/package-info.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/osm/pbf/package-info.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/data/osm/pbf/package-info.java	(revision 18695)
@@ -0,0 +1,9 @@
+// License: GPL. For details, see LICENSE file.
+/**
+ * A package for reading OSM PBF files
+ * See <a href="https://wiki.openstreetmap.org/wiki/PBF_Format">PBF format</a> for details.
+ *
+ * Note: {@link org.openstreetmap.josm.data.osm.pbf.BlobHeader} and {@link org.openstreetmap.josm.data.osm.pbf.Blob} are the "root" messages.
+ * The remaining messages are part of the {@link org.openstreetmap.josm.data.osm.pbf.Blob}.
+ */
+package org.openstreetmap.josm.data.osm.pbf;
Index: trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufPacked.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufPacked.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufPacked.java	(revision 18695)
@@ -3,6 +3,5 @@
 
 import java.io.ByteArrayOutputStream;
-import java.util.ArrayList;
-import java.util.List;
+import java.util.Arrays;
 
 /**
@@ -13,7 +12,6 @@
  */
 public class ProtobufPacked {
-    private static final Number[] NO_NUMBERS = new Number[0];
     private final byte[] bytes;
-    private final Number[] numbers;
+    private final long[] numbers;
     private int location;
 
@@ -21,8 +19,20 @@
      * Create a new ProtobufPacked object
      *
-     * @param byteArrayOutputStream A reusable ByteArrayOutputStream (helps to reduce memory allocations)
+     * @param ignored A reusable ByteArrayOutputStream (no longer used)
      * @param bytes The packed bytes
+     * @deprecated The output stream is no longer used; use {@link #ProtobufPacked(byte[])} instead.
      */
-    public ProtobufPacked(ByteArrayOutputStream byteArrayOutputStream, byte[] bytes) {
+    @Deprecated
+    public ProtobufPacked(ByteArrayOutputStream ignored, byte[] bytes) {
+        this(bytes);
+    }
+
+    /**
+     * Create a new ProtobufPacked object
+     *
+     * @param bytes The packed bytes
+     * @since 18695
+     */
+    public ProtobufPacked(byte[] bytes) {
         this.location = 0;
         this.bytes = bytes;
@@ -31,11 +41,19 @@
         // only adds 3.7 MB to the ArrayList#init calls. Note that the real-world test case (Mapillary vector tiles)
         // primarily created Shorts.
-        List<Number> numbersT = new ArrayList<>(bytes.length);
+        long[] numbersT = new long[bytes.length];
+        int index = 0;
         // By reusing a ByteArrayOutputStream, we can reduce allocations in nextVarInt from 230 MB to 74 MB.
         while (this.location < bytes.length) {
-            numbersT.add(ProtobufParser.convertByteArray(this.nextVarInt(byteArrayOutputStream), ProtobufParser.VAR_INT_BYTE_SIZE));
+            int start = this.location;
+            numbersT[index] = ProtobufParser.convertByteArray(this.bytes, ProtobufParser.VAR_INT_BYTE_SIZE,
+                    start, this.nextVarInt());
+            index++;
         }
 
-        this.numbers = numbersT.toArray(NO_NUMBERS);
+        if (numbersT.length == index) {
+            this.numbers = numbersT;
+        } else {
+            this.numbers = Arrays.copyOf(numbersT, index);
+        }
     }
 
@@ -45,25 +63,20 @@
      * @return The number array
      */
-    public Number[] getArray() {
+    public long[] getArray() {
         return this.numbers;
     }
 
-    private byte[] nextVarInt(final ByteArrayOutputStream byteArrayOutputStream) {
-        // In a real world test, the largest List<Byte> seen had 3 elements. Use 4 to avoid most new array allocations.
-        // Memory allocations went from 368 MB to 280 MB by using an initial array allocation. When using a
-        // ByteArrayOutputStream, it went down to 230 MB. By further reusing the ByteArrayOutputStream between method
-        // calls, it went down further to 73 MB.
+    /**
+     * Gets the location where the next var int begins. Note: changes {@link ProtobufPacked#location}.
+     * @return The next varint location
+     */
+    private int nextVarInt() {
         while ((this.bytes[this.location] & ProtobufParser.MOST_SIGNIFICANT_BYTE)
           == ProtobufParser.MOST_SIGNIFICANT_BYTE) {
             // Get rid of the leading bit (shift left 1, then shift right 1 unsigned)
-            byteArrayOutputStream.write(this.bytes[this.location++] ^ ProtobufParser.MOST_SIGNIFICANT_BYTE);
+            this.bytes[this.location] = (byte) (this.bytes[this.location] ^ ProtobufParser.MOST_SIGNIFICANT_BYTE);
+            this.location++;
         }
-        // The last byte doesn't drop the most significant bit
-        byteArrayOutputStream.write(this.bytes[this.location++]);
-        try {
-            return byteArrayOutputStream.toByteArray();
-        } finally {
-            byteArrayOutputStream.reset();
-        }
+        return ++this.location;
     }
 }
Index: trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufParser.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufParser.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufParser.java	(revision 18695)
@@ -8,4 +8,5 @@
 import java.io.InputStream;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 
@@ -31,4 +32,6 @@
      */
     static final byte MOST_SIGNIFICANT_BYTE = (byte) (1 << 7);
+    private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
+
     /**
      * Convert a byte array to a number (little endian)
@@ -39,10 +42,25 @@
      */
     public static Number convertByteArray(byte[] bytes, byte byteSize) {
+        return convertLong(convertByteArray(bytes, byteSize, 0, bytes.length));
+    }
+
+    /**
+     * Convert a byte array to a number (little endian)
+     *
+     * @param bytes    The bytes to convert
+     * @param byteSize The size of the byte. For var ints, this is 7, for other ints, this is 8.
+     * @param start    The start position in the byte array
+     * @param end      The end position in the byte array (exclusive - [start, end) )
+     * @return the number from the byte array. Depending upon the length of time the number will be
+     * stored, narrowing may be helpful.
+     * @since 18695
+     */
+    public static long convertByteArray(byte[] bytes, byte byteSize, int start, int end) {
         long number = 0;
-        for (int i = 0; i < bytes.length; i++) {
+        for (int i = start; i < end; i++) {
             // Need to convert to uint64 in order to avoid bit operation from filling in 1's and overflow issues
-            number += Byte.toUnsignedLong(bytes[i]) << (byteSize * i);
-        }
-        return convertLong(number);
+            number += Byte.toUnsignedLong(bytes[i]) << (byteSize * (i - start));
+        }
+        return number;
     }
 
@@ -72,6 +90,16 @@
      */
     public static Number decodeZigZag(Number signed) {
-        final long value = signed.longValue();
-        return convertLong((value >> 1) ^ -(value & 1));
+        return convertLong(decodeZigZag(signed.longValue()));
+    }
+
+    /**
+     * Decode a zig-zag encoded value
+     *
+     * @param signed The value to decode
+     * @return The decoded value
+     * @since 18695
+     */
+    public static long decodeZigZag(long signed) {
+        return (signed >> 1) ^ -(signed & 1);
     }
 
@@ -204,5 +232,6 @@
      */
     public byte[] nextLengthDelimited(ByteArrayOutputStream byteArrayOutputStream) throws IOException {
-        int length = convertByteArray(this.nextVarInt(byteArrayOutputStream), VAR_INT_BYTE_SIZE).intValue();
+        final byte[] nextVarInt = this.nextVarInt(byteArrayOutputStream);
+        int length = (int) convertByteArray(nextVarInt, VAR_INT_BYTE_SIZE, 0, nextVarInt.length);
         return readNextBytes(length);
     }
@@ -237,11 +266,14 @@
      *
      * @param size The number of bytes to read
-     * @return a byte array of the specified size, filled with bytes read (unsigned)
+     * @return a byte array filled with bytes read (unsigned)
      * @throws IOException - if an IO error occurs
      */
     private byte[] readNextBytes(int size) throws IOException {
         byte[] bytesRead = new byte[size];
-        for (int i = 0; i < bytesRead.length; i++) {
-            bytesRead[i] = (byte) this.nextByte();
+        int read = this.inputStream.read(bytesRead);
+        if (read == -1) {
+            return EMPTY_BYTE_ARRAY;
+        } else if (read != size) {
+            return Arrays.copyOf(bytesRead, read);
         }
         return bytesRead;
Index: trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufRecord.java
===================================================================
--- trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufRecord.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/data/protobuf/ProtobufRecord.java	(revision 18695)
@@ -28,9 +28,10 @@
      */
     public ProtobufRecord(ByteArrayOutputStream byteArrayOutputStream, ProtobufParser parser) throws IOException {
-        Number number = ProtobufParser.convertByteArray(parser.nextVarInt(byteArrayOutputStream), ProtobufParser.VAR_INT_BYTE_SIZE);
+        final byte[] varInt = parser.nextVarInt(byteArrayOutputStream);
+        long number = ProtobufParser.convertByteArray(varInt, ProtobufParser.VAR_INT_BYTE_SIZE, 0, varInt.length);
         // I don't foresee having field numbers > {@code Integer#MAX_VALUE >> 3}
-        this.field = (int) number.longValue() >> 3;
+        this.field = (int) number >> 3;
         // 7 is 111 (so last three bits)
-        byte wireType = (byte) (number.longValue() & 7);
+        byte wireType = (byte) (number & 7);
         // By not using a stream, we reduce the number of allocations (for getting the WireType) from 257 MB to 40 MB.
         // (The remaining 40 MB is from WireType#values). By using the cached getAllValues(), we drop the 40 MB.
Index: trunk/src/org/openstreetmap/josm/gui/io/importexport/OsmPbfImporter.java
===================================================================
--- trunk/src/org/openstreetmap/josm/gui/io/importexport/OsmPbfImporter.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/gui/io/importexport/OsmPbfImporter.java	(revision 18695)
@@ -0,0 +1,46 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.gui.io.importexport;
+
+import static org.openstreetmap.josm.tools.I18n.tr;
+
+import java.io.InputStream;
+import java.util.Arrays;
+
+import org.openstreetmap.josm.actions.ExtensionFileFilter;
+import org.openstreetmap.josm.data.osm.DataSet;
+import org.openstreetmap.josm.gui.progress.ProgressMonitor;
+import org.openstreetmap.josm.io.IllegalDataException;
+import org.openstreetmap.josm.io.OsmPbfReader;
+
+/**
+ * File importer that reads *.osm.pbf data files.
+ * @since xxx
+ */
+public class OsmPbfImporter extends OsmImporter {
+    /**
+     * The OSM file filter (*.osm.pbf files).
+     */
+    public static final ExtensionFileFilter FILE_FILTER = ExtensionFileFilter.newFilterWithArchiveExtensions(
+            "osm.pbf", "osm.pbf", tr("OSM PBF Files") + " (*.osm.pbf, *.osm.pbf.gz, *.osm.pbf.bz2, *.osm.pbf.xz, *.osm.pbf.zip)",
+            ExtensionFileFilter.AddArchiveExtension.NONE, Arrays.asList("gz", "bz", "bz2", "xz", "zip"));
+
+    /**
+     * Constructs a new {@code OsmPbfImporter}.
+     */
+    public OsmPbfImporter() {
+        super(FILE_FILTER);
+    }
+
+    /**
+     * Constructs a new {@code OsmPbfImporter} with the given extension file filter.
+     * @param filter The extension file filter
+     */
+    public OsmPbfImporter(ExtensionFileFilter filter) {
+        super(filter);
+    }
+
+    @Override
+    protected DataSet parseDataSet(InputStream in, ProgressMonitor progressMonitor) throws IllegalDataException {
+        return OsmPbfReader.parseDataSet(in, progressMonitor);
+    }
+}
Index: trunk/src/org/openstreetmap/josm/io/AbstractReader.java
===================================================================
--- trunk/src/org/openstreetmap/josm/io/AbstractReader.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/io/AbstractReader.java	(revision 18695)
@@ -274,4 +274,19 @@
     protected abstract DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor) throws IllegalDataException;
 
+    /**
+     * An interface for reading binary data
+     * @since xxx
+     */
+    @FunctionalInterface
+    protected interface BinaryParserWorker {
+        /**
+         * Effectively parses the file, depending on the binary format (PBF, etc.)
+         * @param ir input stream reader
+         * @throws IllegalDataException in case of invalid data
+         * @throws IOException in case of I/O error
+         */
+        void accept(InputStream ir) throws IllegalDataException, IOException;
+    }
+
     @FunctionalInterface
     protected interface ParserWorker {
@@ -285,5 +300,15 @@
     }
 
+    protected final DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor, BinaryParserWorker parserWorker)
+            throws IllegalDataException {
+        return this.doParseDataSet(source, progressMonitor, (Object) parserWorker);
+    }
+
     protected final DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor, ParserWorker parserWorker)
+            throws IllegalDataException {
+        return this.doParseDataSet(source, progressMonitor, (Object) parserWorker);
+    }
+
+    private DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor, Object parserWorker)
             throws IllegalDataException {
         if (progressMonitor == null) {
@@ -297,6 +322,12 @@
             progressMonitor.indeterminateSubTask(tr("Parsing OSM data..."));
 
-            try (InputStreamReader ir = UTFInputStreamReader.create(source)) {
-                parserWorker.accept(ir);
+            if (parserWorker instanceof ParserWorker) {
+                try (InputStreamReader ir = UTFInputStreamReader.create(source)) {
+                    ((ParserWorker) parserWorker).accept(ir);
+                }
+            } else if (parserWorker instanceof BinaryParserWorker) {
+                ((BinaryParserWorker) parserWorker).accept(source);
+            } else {
+                throw new IllegalArgumentException("Unknown parser worker type: " + parserWorker.getClass());
             }
             progressMonitor.worked(1);
Index: trunk/src/org/openstreetmap/josm/io/OsmJsonReader.java
===================================================================
--- trunk/src/org/openstreetmap/josm/io/OsmJsonReader.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/io/OsmJsonReader.java	(revision 18695)
@@ -183,5 +183,5 @@
     protected DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor) throws IllegalDataException {
         try {
-            return doParseDataSet(source, progressMonitor, ir -> {
+            return doParseDataSet(source, progressMonitor, (ParserWorker) ir -> {
                 setParser(Json.createParser(ir));
                 parse();
Index: trunk/src/org/openstreetmap/josm/io/OsmPbfReader.java
===================================================================
--- trunk/src/org/openstreetmap/josm/io/OsmPbfReader.java	(revision 18695)
+++ trunk/src/org/openstreetmap/josm/io/OsmPbfReader.java	(revision 18695)
@@ -0,0 +1,988 @@
+// License: GPL. For details, see LICENSE file.
+package org.openstreetmap.josm.io;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import org.apache.commons.compress.utils.CountingInputStream;
+import org.openstreetmap.josm.data.Bounds;
+import org.openstreetmap.josm.data.DataSource;
+import org.openstreetmap.josm.data.coor.LatLon;
+import org.openstreetmap.josm.data.osm.BBox;
+import org.openstreetmap.josm.data.osm.DataSet;
+import org.openstreetmap.josm.data.osm.NodeData;
+import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
+import org.openstreetmap.josm.data.osm.PrimitiveData;
+import org.openstreetmap.josm.data.osm.RelationData;
+import org.openstreetmap.josm.data.osm.RelationMemberData;
+import org.openstreetmap.josm.data.osm.Tagged;
+import org.openstreetmap.josm.data.osm.User;
+import org.openstreetmap.josm.data.osm.WayData;
+import org.openstreetmap.josm.data.osm.pbf.Blob;
+import org.openstreetmap.josm.data.osm.pbf.BlobHeader;
+import org.openstreetmap.josm.data.osm.pbf.HeaderBlock;
+import org.openstreetmap.josm.data.osm.pbf.Info;
+import org.openstreetmap.josm.data.protobuf.ProtobufPacked;
+import org.openstreetmap.josm.data.protobuf.ProtobufParser;
+import org.openstreetmap.josm.data.protobuf.ProtobufRecord;
+import org.openstreetmap.josm.data.protobuf.WireType;
+import org.openstreetmap.josm.gui.progress.NullProgressMonitor;
+import org.openstreetmap.josm.gui.progress.ProgressMonitor;
+import org.openstreetmap.josm.tools.Utils;
+
+/**
+ * Read OSM data from an OSM PBF file
+ * @since xxx
+ */
+public final class OsmPbfReader extends AbstractReader {
+    /** Shared empty array, used as the initial value for delta-decoded packed fields to avoid re-allocation */
+    private static final long[] EMPTY_LONG = new long[0];
+    /**
+     * Nano degrees
+     */
+    private static final double NANO_DEGREES = 1e-9;
+    /**
+     * The maximum BlobHeader size. BlobHeaders should (but not must) be less than half this
+     */
+    private static final int MAX_BLOBHEADER_SIZE = 64 * 1024;
+    /**
+     * The maximum Blob size. Blobs should (but not must) be less than half this
+     */
+    private static final int MAX_BLOB_SIZE = 32 * 1024 * 1024;
+
+    // Instances are created internally by the static parseDataSet entry point only
+    private OsmPbfReader() {
+        // Hide constructor
+    }
+
+    /**
+     * Parse the given input source and return the dataset.
+     *
+     * @param source          the source input stream. Must not be null.
+     * @param progressMonitor the progress monitor. If null, {@link NullProgressMonitor#INSTANCE} is assumed
+     * @return the dataset with the parsed data
+     * @throws IllegalDataException     if an error was found while parsing the data from the source
+     * @throws IllegalArgumentException if source is null
+     */
+    public static DataSet parseDataSet(InputStream source, ProgressMonitor progressMonitor) throws IllegalDataException {
+        // A fresh reader instance per call: no parser state is shared between invocations
+        return new OsmPbfReader().doParseDataSet(source, progressMonitor);
+    }
+
+    @Override
+    protected DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor) throws IllegalDataException {
+        // this::parse resolves to the BinaryParserWorker overload, so the raw stream is passed through
+        return doParseDataSet(source, progressMonitor, this::parse);
+    }
+
+    /**
+     * Parse the PBF stream: a sequence of (BlobHeader, Blob) pairs, where the blob type is either
+     * "OSMHeader" (exactly one, before any data) or "OSMData".
+     *
+     * @param source the stream to read from
+     * @return the dataset accumulated by this reader
+     * @throws IllegalDataException if a second header block is encountered
+     * @throws IOException in case of I/O error
+     */
+    private DataSet parse(InputStream source) throws IllegalDataException, IOException {
+        final CountingInputStream inputStream;
+        // Byte counting is needed so parseBlobHeader/parseBlob know where a message ends.
+        // NOTE(review): the BufferedInputStream wrapper presumably exists to guarantee mark support
+        // for the parser -- confirm against ProtobufParser's requirements.
+        if (source.markSupported()) {
+            inputStream = new CountingInputStream(source);
+        } else {
+            inputStream = new CountingInputStream(new BufferedInputStream(source));
+        }
+        try (ProtobufParser parser = new ProtobufParser(inputStream)) {
+            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            HeaderBlock headerBlock = null;
+            BlobHeader blobHeader = null;
+            // Alternate between reading a BlobHeader and consuming the Blob it describes;
+            // this.cancel allows the user to abort a long-running import.
+            while (parser.hasNext() && !this.cancel) {
+                if (blobHeader == null) {
+                    blobHeader = parseBlobHeader(inputStream, baos, parser);
+                } else if ("OSMHeader".equals(blobHeader.type())) {
+                    if (headerBlock != null) {
+                        throw new IllegalDataException("Too many header blocks in protobuf");
+                    }
+                    // OSM PBF is fun -- it has *nested* pbf data
+                    Blob blob = parseBlob(blobHeader, inputStream, parser, baos);
+                    headerBlock = parseHeaderBlock(blob, baos);
+                    checkRequiredFeatures(headerBlock);
+                    blobHeader = null;
+                } else if ("OSMData".equals(blobHeader.type())) {
+                    if (headerBlock == null) {
+                        throw new IllegalStateException("A header block must occur before the first data block");
+                    }
+                    Blob blob = parseBlob(blobHeader, inputStream, parser, baos);
+                    parseDataBlock(baos, headerBlock, blob);
+                    blobHeader = null;
+                } // Other software *may* extend the FileBlocks (from just "OSMHeader" and "OSMData"), so don't throw an error.
+            }
+        }
+        return this.getDataSet();
+    }
+
+    /**
+     * Parse a blob header
+     *
+     * @param cis    A counting stream to ensure we don't read too much data
+     * @param baos   A reusable stream
+     * @param parser The parser to read from
+     * @return The BlobHeader message
+     * @throws IOException          if one of the streams has an issue
+     * @throws IllegalDataException If the OSM PBF is (probably) corrupted
+     */
+    @Nonnull
+    private BlobHeader parseBlobHeader(CountingInputStream cis, ByteArrayOutputStream baos, ProtobufParser parser)
+            throws IOException, IllegalDataException {
+        String type = null;
+        byte[] indexData = null;
+        int datasize = Integer.MIN_VALUE;
+        int length = 0;
+        long start = cis.getBytesRead();
+        while (parser.hasNext() && (length == 0 || cis.getBytesRead() - start < length)) {
+            final ProtobufRecord current = new ProtobufRecord(baos, parser);
+            switch (current.getField()) {
+                case 1: // required string type
+                    type = current.asString();
+                    break;
+                case 2: // optional bytes indexdata
+                    indexData = current.getBytes();
+                    break;
+                case 3: // required int32 datasize
+                    datasize = current.asUnsignedVarInt().intValue();
+                    break;
+                default:
+                    // NOTE(review): unknown fields are treated as the delimiter/length for the message,
+                    // driving the loop condition above -- confirm against ProtobufParser's record model.
+                    start = cis.getBytesRead();
+                    length += current.asUnsignedVarInt().intValue();
+                    if (length > MAX_BLOBHEADER_SIZE) { // There is a hard limit of 64 KiB for the BlobHeader. It *should* be less than 32 KiB.
+                        // Fixed: the message previously opened a parenthesis that was never closed
+                        throw new IllegalDataException("OSM PBF BlobHeader is too large. PBF is probably corrupted. (" +
+                                Utils.getSizeString(MAX_BLOBHEADER_SIZE, Locale.ENGLISH) + " < " + Utils.getSizeString(length, Locale.ENGLISH)
+                                + ")");
+                    }
+            }
+        }
+        if (type == null || Integer.MIN_VALUE == datasize) {
+            throw new IllegalDataException("OSM PBF BlobHeader could not be read. PBF is probably corrupted.");
+        } else if (datasize > MAX_BLOB_SIZE) { // There is a hard limit of 32 MiB for the blob size. It *should* be less than 16 MiB.
+            // Fixed: the message previously opened a parenthesis that was never closed
+            throw new IllegalDataException("OSM PBF Blob size is too large. PBF is probably corrupted. ("
+                    + Utils.getSizeString(MAX_BLOB_SIZE, Locale.ENGLISH) + " < " + Utils.getSizeString(datasize, Locale.ENGLISH) + ")");
+        }
+        return new BlobHeader(type, indexData, datasize);
+    }
+
+    /**
+     * Parse a blob from the PBF file
+     *
+     * @param header The header with the blob information (most critically, the length of the blob)
+     * @param cis    Used to ensure we don't read too much data
+     * @param parser The parser to read records from
+     * @param baos   The reusable output stream
+     * @return The blob to use elsewhere
+     * @throws IOException If one of the streams has an issue
+     */
+    @Nonnull
+    private Blob parseBlob(BlobHeader header, CountingInputStream cis, ProtobufParser parser, ByteArrayOutputStream baos) throws IOException {
+        long start = cis.getBytesRead();
+        int size = Integer.MIN_VALUE;
+        Blob.CompressionType type = null;
+        ProtobufRecord current = null;
+        while (parser.hasNext() && cis.getBytesRead() - start < header.dataSize()) {
+            current = new ProtobufRecord(baos, parser);
+            switch (current.getField()) {
+                case 1:
+                    type = Blob.CompressionType.raw;
+                    break;
+                case 2:
+                    size = current.asUnsignedVarInt().intValue();
+                    break;
+                case 3:
+                    type = Blob.CompressionType.zlib;
+                    break;
+                case 4:
+                    type = Blob.CompressionType.lzma;
+                    break;
+                case 5:
+                    type = Blob.CompressionType.bzip2;
+                    break;
+                case 6:
+                    type = Blob.CompressionType.lz4;
+                    break;
+                case 7:
+                    type = Blob.CompressionType.zstd;
+                    break;
+                default:
+                    throw new IllegalStateException("Unknown compression type: " + current.getField());
+            }
+        }
+        return new Blob(size, type, current.getBytes());
+    }
+
+    /**
+     * Parse a header block. This assumes that the parser has hit a string with the text "OSMHeader".
+     *
+     * @param blob The blob with the header block data
+     * @param baos The reusable output stream to use
+     * @return The parsed HeaderBlock
+     * @throws IOException if one of the {@link InputStream}s has a problem
+     */
+    @Nonnull
+    private HeaderBlock parseHeaderBlock(Blob blob, ByteArrayOutputStream baos) throws IOException {
+        try (InputStream blobInput = blob.inputStream();
+             ProtobufParser parser = new ProtobufParser(blobInput)) {
+            // All HeaderBlock fields are optional; anything left unset stays null/empty.
+            BBox bbox = null;
+            List<String> required = new ArrayList<>();
+            List<String> optional = new ArrayList<>();
+            String program = null;
+            String source = null;
+            Long osmosisReplicationTimestamp = null;
+            Long osmosisReplicationSequenceNumber = null;
+            String osmosisReplicationBaseUrl = null;
+            while (parser.hasNext()) {
+                final ProtobufRecord current = new ProtobufRecord(baos, parser);
+                switch (current.getField()) {
+                    case 1: // bbox
+                        bbox = parseBBox(baos, current);
+                        break;
+                    case 4: // repeated required features
+                        required.add(current.asString());
+                        break;
+                    case 5: // repeated optional features
+                        optional.add(current.asString());
+                        break;
+                    case 16: // writing program
+                        program = current.asString();
+                        break;
+                    case 17: // source
+                        source = current.asString();
+                        break;
+                    case 32: // osmosis replication timestamp
+                        osmosisReplicationTimestamp = current.asSignedVarInt().longValue();
+                        break;
+                    case 33: // osmosis replication sequence number
+                        osmosisReplicationSequenceNumber = current.asSignedVarInt().longValue();
+                        break;
+                    case 34: // osmosis replication base url
+                        osmosisReplicationBaseUrl = current.asString();
+                        break;
+                    default: // fall through -- unknown header block field
+                }
+            }
+            return new HeaderBlock(bbox, required.toArray(new String[0]), optional.toArray(new String[0]), program,
+                    source, osmosisReplicationTimestamp, osmosisReplicationSequenceNumber, osmosisReplicationBaseUrl);
+        }
+    }
+
+    /**
+     * Ensure that we support all the required features in the PBF
+     *
+     * @param headerBlock The HeaderBlock to check
+     * @throws IllegalDataException If there exists at least one feature that we do not support
+     */
+    private static void checkRequiredFeatures(HeaderBlock headerBlock) throws IllegalDataException {
+        Set<String> supportedFeatures = new HashSet<>(Arrays.asList("OsmSchema-V0.6", "DenseNodes", "HistoricalInformation"));
+        for (String requiredFeature : headerBlock.requiredFeatures()) {
+            if (!supportedFeatures.contains(requiredFeature)) {
+                throw new IllegalDataException("PBF Parser: Unknown required feature " + requiredFeature);
+            }
+        }
+    }
+
+    /**
+     * Parse a data blob (should be "OSMData")
+     *
+     * @param baos        The reusable stream
+     * @param headerBlock The header block with data source information
+     * @param blob        The blob to read OSM data from
+     * @throws IOException          if we don't support the compression type
+     * @throws IllegalDataException If an invalid OSM primitive was read
+     */
+    private void parseDataBlock(ByteArrayOutputStream baos, HeaderBlock headerBlock, Blob blob) throws IOException, IllegalDataException {
+        String[] stringTable = null; // field 1, note that stringTable[0] is a delimiter, so it is always blank and unused
+        // field 2 -- we cannot parse these live just in case the following fields come later
+        List<ProtobufRecord> primitiveGroups = new ArrayList<>();
+        int granularity = 100; // field 17
+        long latOffset = 0; // field 19
+        long lonOffset = 0; // field 20
+        int dateGranularity = 1000; // field 18, default is milliseconds since the 1970 epoch
+        try (InputStream inputStream = blob.inputStream();
+             ProtobufParser parser = new ProtobufParser(inputStream)) {
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1:
+                        stringTable = parseStringTable(baos, protobufRecord.getBytes());
+                        break;
+                    case 2:
+                        primitiveGroups.add(protobufRecord);
+                        break;
+                    case 17:
+                        granularity = protobufRecord.asUnsignedVarInt().intValue();
+                        break;
+                    case 18:
+                        dateGranularity = protobufRecord.asUnsignedVarInt().intValue();
+                        break;
+                    case 19:
+                        latOffset = protobufRecord.asUnsignedVarInt().longValue();
+                        break;
+                    case 20:
+                        lonOffset = protobufRecord.asUnsignedVarInt().longValue();
+                        break;
+                    default: // Pass, since someone might have extended the format
+                }
+            }
+        }
+        // NOTE(review): stringTable stays null if field 1 is absent; downstream parse* methods index it
+        // unconditionally for tags -- confirm that a block with primitives but no string table is invalid.
+        final PrimitiveBlockRecord primitiveBlockRecord = new PrimitiveBlockRecord(stringTable, granularity, latOffset, lonOffset,
+                dateGranularity);
+        final DataSet ds = getDataSet();
+        // The bbox is an optional header field (parseBBox may also return null); guard against NPE
+        // instead of failing on valid PBFs that omit it.
+        if (!primitiveGroups.isEmpty() && headerBlock.bbox() != null) {
+            try {
+                ds.beginUpdate();
+                ds.addDataSource(new DataSource(new Bounds((LatLon) headerBlock.bbox().getMin(), (LatLon) headerBlock.bbox().getMax()),
+                        headerBlock.source()));
+            } finally {
+                ds.endUpdate();
+            }
+        }
+        for (ProtobufRecord primitiveGroup : primitiveGroups) {
+            try {
+                ds.beginUpdate();
+                parsePrimitiveGroup(baos, primitiveGroup.getBytes(), primitiveBlockRecord);
+            } finally {
+                ds.endUpdate();
+            }
+        }
+    }
+
+    /**
+     * This parses a bbox from a record (HeaderBBox message)
+     *
+     * @param baos    The reusable {@link ByteArrayOutputStream} to avoid unnecessary allocations
+     * @param current The current record
+     * @return The <i>immutable</i> bbox, or {@code null} if any of the four edges was missing
+     * @throws IOException If something happens with the {@link InputStream}s (probably won't happen)
+     */
+    @Nullable
+    private static BBox parseBBox(ByteArrayOutputStream baos, ProtobufRecord current) throws IOException {
+        try (ByteArrayInputStream bboxInputStream = new ByteArrayInputStream(current.getBytes());
+             ProtobufParser bboxParser = new ProtobufParser(bboxInputStream)) {
+            // NaN marks "not yet seen"; all four edges must appear for a bbox to be returned
+            double left = Double.NaN;
+            double right = Double.NaN;
+            double top = Double.NaN;
+            double bottom = Double.NaN;
+            while (bboxParser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, bboxParser);
+                if (protobufRecord.getType() == WireType.VARINT) {
+                    // HeaderBBox values are sint64 in nanodegrees
+                    double value = protobufRecord.asSignedVarInt().longValue() * NANO_DEGREES;
+                    switch (protobufRecord.getField()) {
+                        case 1: // left (min lon)
+                            left = value;
+                            break;
+                        case 2: // right (max lon)
+                            right = value;
+                            break;
+                        case 3: // top (max lat)
+                            top = value;
+                            break;
+                        case 4: // bottom (min lat)
+                            bottom = value;
+                            break;
+                        default: // Fall through -- someone might have extended the format
+                    }
+                }
+            }
+            if (!Double.isNaN(left) && !Double.isNaN(top) && !Double.isNaN(right) && !Double.isNaN(bottom)) {
+                return new BBox(left, top, right, bottom).toImmutable();
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Parse the string table
+     *
+     * @param baos  The reusable stream
+     * @param bytes The message bytes
+     * @return The parsed table (reminder: index 0 is empty, note that all strings are already interned by {@link String#intern()})
+     * @throws IOException if something happened while reading a {@link ByteArrayInputStream}
+     */
+    @Nonnull
+    private String[] parseStringTable(ByteArrayOutputStream baos, byte[] bytes) throws IOException {
+        try (ByteArrayInputStream is = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(is)) {
+            List<String> list = new ArrayList<>();
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                if (protobufRecord.getField() == 1) {
+                    list.add(protobufRecord.asString().intern()); // field is technically repeated bytes
+                }
+            }
+            return list.toArray(new String[0]);
+        }
+    }
+
+    /**
+     * Parse a PrimitiveGroup. Note: this parsing implementation doesn't check and make certain that all primitives in the group are the same
+     * type.
+     *
+     * @param baos                 The reusable stream
+     * @param bytes                The bytes to decode
+     * @param primitiveBlockRecord The record to use for creating the primitives
+     * @throws IllegalDataException if one of the primitive records was invalid
+     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
+     */
+    private void parsePrimitiveGroup(ByteArrayOutputStream baos, byte[] bytes, PrimitiveBlockRecord primitiveBlockRecord)
+            throws IllegalDataException, IOException {
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1: // Nodes, repeated
+                        parseNode(baos, protobufRecord.getBytes(), primitiveBlockRecord);
+                        break;
+                    case 2: // Dense nodes, not repeated
+                        parseDenseNodes(baos, protobufRecord.getBytes(), primitiveBlockRecord);
+                        break;
+                    case 3: // Ways, repeated
+                        parseWay(baos, protobufRecord.getBytes(), primitiveBlockRecord);
+                        break;
+                    case 4: // relations, repeated
+                        parseRelation(baos, protobufRecord.getBytes(), primitiveBlockRecord);
+                        break;
+                    case 5: // Changesets, repeated
+                        // Skip -- we don't have a good way to store changeset information in JOSM
+                        // (intentional fall through to default: the record is consumed and ignored)
+                    default: // OSM PBF could be extended
+                }
+            }
+        }
+    }
+
+    /**
+     * Parse a singular node
+     *
+     * @param baos                 The reusable stream
+     * @param bytes                The bytes to decode
+     * @param primitiveBlockRecord The record to use (mostly for tags and lat/lon calculations)
+     * @throws IllegalDataException if the PBF did not provide all the data necessary for node creation
+     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
+     */
+    private void parseNode(ByteArrayOutputStream baos, byte[] bytes, PrimitiveBlockRecord primitiveBlockRecord)
+            throws IllegalDataException, IOException {
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            long id = Long.MIN_VALUE;
+            List<String> keys = new ArrayList<>();
+            List<String> values = new ArrayList<>();
+            Info info = null;
+            long lat = Long.MIN_VALUE;
+            long lon = Long.MIN_VALUE;
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1:
+                        // Node ids are sint64; using intValue() here truncated ids above 2^31
+                        id = protobufRecord.asSignedVarInt().longValue();
+                        break;
+                    case 2:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            keys.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 3:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            values.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 4:
+                        info = parseInfo(baos, protobufRecord.getBytes());
+                        break;
+                    case 8:
+                        lat = protobufRecord.asSignedVarInt().longValue();
+                        break;
+                    case 9:
+                        lon = protobufRecord.asSignedVarInt().longValue();
+                        break;
+                    default: // Fall through -- PBF could be extended (unlikely)
+                }
+            }
+            if (id == Long.MIN_VALUE || lat == Long.MIN_VALUE || lon == Long.MIN_VALUE) {
+                throw new IllegalDataException("OSM PBF did not provide all the required node information");
+            }
+            NodeData node = new NodeData(id);
+            node.setCoor(calculateLatLon(primitiveBlockRecord, lat, lon));
+            addTags(node, keys, values);
+            if (info != null) {
+                setOsmPrimitiveData(primitiveBlockRecord, node, info);
+            }
+            buildPrimitive(node);
+        }
+    }
+
+    /**
+     * Parse dense nodes from a record
+     *
+     * @param baos                 The reusable output stream
+     * @param bytes                The bytes for the dense node
+     * @param primitiveBlockRecord Used for data that is common between several different objects.
+     * @throws IllegalDataException if the nodes could not be parsed, or one of the nodes would be malformed
+     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
+     */
+    private void parseDenseNodes(ByteArrayOutputStream baos, byte[] bytes, PrimitiveBlockRecord primitiveBlockRecord)
+            throws IllegalDataException, IOException {
+        long[] ids = EMPTY_LONG;
+        long[] lats = EMPTY_LONG;
+        long[] lons = EMPTY_LONG;
+        long[] keyVals = EMPTY_LONG; // technically can be int
+        Info[] denseInfo = null;
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1: // packed node ids, DELTA encoded
+                        long[] tids = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
+                        ids = joinArrays(ids, tids);
+                        break;
+                    case 5: // DenseInfo
+                        denseInfo = parseDenseInfo(baos, protobufRecord.getBytes()); // not repeated or packed
+                        break;
+                    case 8: // packed lat, DELTA encoded
+                        long[] tlats = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
+                        lats = joinArrays(lats, tlats);
+                        break;
+                    case 9: // packed lon, DELTA encoded
+                        long[] tlons = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
+                        lons = joinArrays(lons, tlons);
+                        break;
+                    case 10: // key_val mappings, packed. '0' used as separator between nodes
+                        long[] tkeyVal = new ProtobufPacked(protobufRecord.getBytes()).getArray();
+                        keyVals = joinArrays(keyVals, tkeyVal);
+                        break;
+                    default: // Someone might have extended the PBF format
+                }
+            }
+        }
+        int keyValIndex = 0; // This index must not reset between nodes, and must always increment
+        if (ids.length == lats.length && lats.length == lons.length && (denseInfo == null || denseInfo.length == lons.length)) {
+            long id = 0;
+            long lat = 0;
+            long lon = 0;
+            for (int i = 0; i < ids.length; i++) {
+                // Ids are always DELTA encoded (see field 1 above), with or without DenseInfo.
+                // Previously the delta was only accumulated when DenseInfo was present, so files
+                // without metadata produced wrong ids for every node after the first.
+                id += ids[i];
+                final NodeData node = new NodeData(id);
+                if (denseInfo != null) {
+                    setOsmPrimitiveData(primitiveBlockRecord, node, denseInfo[i]);
+                }
+                lat += lats[i];
+                lon += lons[i];
+                // Not very efficient when Node doesn't store the LatLon. Hopefully not too much of an issue
+                node.setCoor(calculateLatLon(primitiveBlockRecord, lat, lon));
+                String key = null;
+                while (keyValIndex < keyVals.length) {
+                    int stringIndex = (int) keyVals[keyValIndex];
+                    // StringTable[0] is always an empty string, and acts as a separator between the tags of different nodes here
+                    if (stringIndex != 0) {
+                        if (key == null) {
+                            key = primitiveBlockRecord.stringTable[stringIndex];
+                        } else {
+                            node.put(key, primitiveBlockRecord.stringTable[stringIndex]);
+                            key = null;
+                        }
+                        keyValIndex++;
+                    } else {
+                        keyValIndex++;
+                        break;
+                    }
+                }
+                // Just add the nodes as we make them -- avoid creating another list that expands every time we parse a node
+                buildPrimitive(node);
+            }
+        } else {
+            throw new IllegalDataException("OSM PBF has mismatched DenseNode lengths");
+        }
+    }
+
+    /**
+     * Parse a way from the PBF
+     *
+     * @param baos                 The reusable stream
+     * @param bytes                The bytes for the way
+     * @param primitiveBlockRecord Used for common information, like tags
+     * @throws IllegalDataException if an invalid way could have been created
+     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
+     */
+    private void parseWay(ByteArrayOutputStream baos, byte[] bytes, PrimitiveBlockRecord primitiveBlockRecord)
+            throws IllegalDataException, IOException {
+        long id = Long.MIN_VALUE;
+        List<String> keys = new ArrayList<>();
+        List<String> values = new ArrayList<>();
+        Info info = null;
+        long[] refs = EMPTY_LONG; // DELTA encoded
+        // We don't do live drawing, so we don't care about lats and lons (we essentially throw them away with the current parser)
+        // This is for the optional feature "LocationsOnWays"
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1:
+                        id = protobufRecord.asUnsignedVarInt().intValue();
+                        break;
+                    case 2:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            keys.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 3:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            values.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 4:
+                        info = parseInfo(baos, protobufRecord.getBytes());
+                        break;
+                    case 8:
+                        long[] tRefs = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
+                        refs = joinArrays(refs, tRefs);
+                        break;
+                    // case 9 and 10 are for "LocationsOnWays" -- this is only usable if we can create the way geometry directly
+                    // if this is ever supported, lats = joinArrays(lats, decodePackedSInt64(...))
+                    default: // PBF could be expanded by other people
+                }
+            }
+        }
+        if (refs.length == 0 || id == Long.MIN_VALUE) {
+            throw new IllegalDataException("A way with either no id or no nodes was found");
+        }
+        WayData wayData = new WayData(id);
+        List<Long> nodeIds = new ArrayList<>(refs.length);
+        long ref = 0;
+        for (long tRef : refs) {
+            ref += tRef;
+            nodeIds.add(ref);
+        }
+        this.ways.put(wayData.getUniqueId(), nodeIds);
+        addTags(wayData, keys, values);
+        if (info != null) {
+            setOsmPrimitiveData(primitiveBlockRecord, wayData, info);
+        }
+        buildPrimitive(wayData);
+    }
+
+    /**
+     * Parse a relation from a PBF
+     *
+     * @param baos                 The reusable stream
+     * @param bytes                The bytes to use
+     * @param primitiveBlockRecord Mostly used for tags
+     * @throws IllegalDataException if the PBF had a bad relation definition
+     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
+     */
+    @Nonnull
+    private void parseRelation(ByteArrayOutputStream baos, byte[] bytes, PrimitiveBlockRecord primitiveBlockRecord)
+            throws IllegalDataException, IOException {
+        long id = Long.MIN_VALUE;
+        List<String> keys = new ArrayList<>();
+        List<String> values = new ArrayList<>();
+        Info info = null;
+        long[] rolesStringId = EMPTY_LONG; // Technically int
+        long[] memids = EMPTY_LONG;
+        long[] types = EMPTY_LONG; // Technically an enum
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            while (parser.hasNext()) {
+                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
+                switch (protobufRecord.getField()) {
+                    case 1:
+                        id = protobufRecord.asUnsignedVarInt().intValue();
+                        break;
+                    case 2:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            keys.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 3:
+                        for (long number : new ProtobufPacked(protobufRecord.getBytes()).getArray()) {
+                            values.add(primitiveBlockRecord.stringTable[(int) number]);
+                        }
+                        break;
+                    case 4:
+                        info = parseInfo(baos, protobufRecord.getBytes());
+                        break;
+                    case 8:
+                        long[] tRoles = new ProtobufPacked(protobufRecord.getBytes()).getArray();
+                        rolesStringId = joinArrays(rolesStringId, tRoles);
+                        break;
+                    case 9:
+                        long[] tMemids = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
+                        memids = joinArrays(memids, tMemids);
+                        break;
+                    case 10:
+                        long[] tTypes = new ProtobufPacked(protobufRecord.getBytes()).getArray();
+                        types = joinArrays(types, tTypes);
+                        break;
+                    default: // Fall through for PBF extensions
+                }
+            }
+        }
+        if (keys.size() != values.size() || rolesStringId.length != memids.length || memids.length != types.length || id == Long.MIN_VALUE) {
+            throw new IllegalDataException("OSM PBF contains a bad relation definition");
+        }
+        RelationData data = new RelationData(id);
+        if (info != null) {
+            setOsmPrimitiveData(primitiveBlockRecord, data, info);
+        }
+        addTags(data, keys, values);
+        OsmPrimitiveType[] valueTypes = OsmPrimitiveType.values();
+        List<RelationMemberData> members = new ArrayList<>(rolesStringId.length);
+        long memberId = 0;
+        for (int i = 0; i < rolesStringId.length; i++) {
+            String role = primitiveBlockRecord.stringTable[(int) rolesStringId[i]];
+            memberId += memids[i];
+            OsmPrimitiveType type = valueTypes[(int) types[i]];
+            members.add(new RelationMemberData(role, type, memberId));
+        }
+        this.relations.put(data.getUniqueId(), members);
+        buildPrimitive(data);
+    }
+
+    /**
+     * Parse info for an object
+     *
+     * @param baos  The reusable stream to use
+     * @param bytes The bytes to decode
+     * @return The info for an object
+     * @throws IOException if something happened while reading a {@link ByteArrayInputStream}
+     */
+    @Nonnull
+    private Info parseInfo(ByteArrayOutputStream baos, byte[] bytes) throws IOException {
+        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+             ProtobufParser parser = new ProtobufParser(bais)) {
+            int version = -1;
+            Long timestamp = null;
+            Long changeset = null;
+            Integer uid = null;
+            Integer userSid = null;
+            boolean visible = true;
+            while (parser.hasNext()) {
+                ProtobufRecord record = new ProtobufRecord(baos, parser);
+                switch (record.getField()) {
+                    case 1:
+                        version = record.asUnsignedVarInt().intValue();
+                        break;
+                    case 2:
+                        timestamp = record.asUnsignedVarInt().longValue();
+                        break;
+                    case 3:
+                        changeset = record.asUnsignedVarInt().longValue();
+                        break;
+                    case 4:
+                        uid = record.asUnsignedVarInt().intValue();
+                        break;
+                    case 5:
+                        userSid = record.asUnsignedVarInt().intValue();
+                        break;
+                    case 6:
+                        visible = record.asUnsignedVarInt().byteValue() == 0;
+                        break;
+                    default: // Fall through, since the PBF format could be extended
+                }
+            }
+            return new Info(version, timestamp, changeset, uid, userSid, visible);
+        }
+    }
+
+    /**
+     * Calculate the actual lat lon
+     *
+     * @param primitiveBlockRecord The record with offset and granularity data
+     * @param lat                  The latitude from the PBF
+     * @param lon                  The longitude from the PBF
+     * @return The actual {@link LatLon}, accounting for PBF offset and granularity changes
+     */
+    @Nonnull
+    private static LatLon calculateLatLon(PrimitiveBlockRecord primitiveBlockRecord, long lat, long lon) {
+        return new LatLon(NANO_DEGREES * (primitiveBlockRecord.latOffset + (primitiveBlockRecord.granularity * lat)),
+                NANO_DEGREES * (primitiveBlockRecord.lonOffset + (primitiveBlockRecord.granularity * lon)));
+    }
+
+    /**
+     * Add a set of tags to a primitive
+     *
+     * @param primitive The primitive to add tags to
+     * @param keys      The keys (must match the size of the values)
+     * @param values    The values (must match the size of the keys)
+     */
+    private static void addTags(Tagged primitive, List<String> keys, List<String> values) {
+        if (keys.isEmpty()) {
+            return;
+        }
+        Map<String, String> tagMap = new HashMap<>(keys.size());
+        for (int i = 0; i < keys.size(); i++) {
+            tagMap.put(keys.get(i), values.get(i));
+        }
+        primitive.putAll(tagMap);
+    }
+
    /**
     * Set the primitive data for an object
     *
     * @param primitiveBlockRecord The record with data for the current primitive (currently uses {@link PrimitiveBlockRecord#stringTable} and
     *                             {@link PrimitiveBlockRecord#dateGranularity}).
     * @param primitive            The primitive to add the information to
     * @param info                 The specific info for the primitive
     */
    private static void setOsmPrimitiveData(PrimitiveBlockRecord primitiveBlockRecord, PrimitiveData primitive, Info info) {
        // toIntExact throws ArithmeticException rather than silently truncating an out-of-range changeset id
        if (info.changeset() != null) {
            primitive.setChangesetId(Math.toIntExact(info.changeset()));
        }
        primitive.setVisible(info.isVisible());
        if (info.timestamp() != null) {
            // PBF timestamps are stored in units of dateGranularity (normally ms); divide by 1000
            // to get the seconds-based raw timestamp
            primitive.setRawTimestamp(Math.toIntExact(info.timestamp() * primitiveBlockRecord.dateGranularity / 1000));
        }
        if (info.uid() != null && info.userSid() != null) {
            // The username lives in the block string table, indexed by user_sid
            primitive.setUser(User.createOsmUser(info.uid(), primitiveBlockRecord.stringTable[info.userSid()]));
        } else if (info.uid() != null) {
            // No string-table entry for the name; fall back to a uid-only lookup
            primitive.setUser(User.getById(info.uid()));
        }
        // parseInfo uses -1 for "version not present", so only set versions that were actually read
        if (info.version() > 0) {
            primitive.setVersion(info.version());
        }
    }
+
+    /**
+     * Convert an array of numbers to an array of longs, decoded from uint (zig zag decoded)
+     *
+     * @param numbers The numbers to convert
+     * @return The long array (the same array that was passed in)
+     */
+    @Nonnull
+    private static long[] decodePackedSInt64(long[] numbers) {
+        for (int i = 0; i < numbers.length; i++) {
+            numbers[i] = ProtobufParser.decodeZigZag(numbers[i]);
+        }
+        return numbers;
+    }
+
+    /**
+     * Join two different arrays
+     *
+     * @param array1 The first array
+     * @param array2 The second array
+     * @return The joined arrays -- may return one of the original arrays, if the other is empty
+     */
+    @Nonnull
+    private static long[] joinArrays(long[] array1, long[] array2) {
+        if (array1.length == 0) {
+            return array2;
+        }
+        if (array2.length == 0) {
+            return array1;
+        }
+        long[] result = Arrays.copyOf(array1, array1.length + array2.length);
+        System.arraycopy(array2, 0, result, array1.length, array2.length);
+        return result;
+    }
+
    /**
     * Parse dense info
     *
     * @param baos  The reusable stream
     * @param bytes The bytes to decode
     * @return The dense info array
     * @throws IllegalDataException If the data has mismatched array lengths
     * @throws IOException          if something happened while reading a {@link ByteArrayInputStream}
     */
    @Nonnull
    private Info[] parseDenseInfo(ByteArrayOutputStream baos, byte[] bytes) throws IllegalDataException, IOException {
        // Every field below is a packed parallel array, one entry per node. timestamp, changeset,
        // uid and userSid are DELTA encoded (zigzag decoded here, accumulated later); version is stored as-is.
        long[] version = EMPTY_LONG; // technically ints
        long[] timestamp = EMPTY_LONG;
        long[] changeset = EMPTY_LONG;
        long[] uid = EMPTY_LONG; // technically int
        long[] userSid = EMPTY_LONG; // technically int
        long[] visible = EMPTY_LONG; // optional, true if not set, technically booleans
        try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
             ProtobufParser parser = new ProtobufParser(bais)) {
            while (parser.hasNext()) {
                ProtobufRecord protobufRecord = new ProtobufRecord(baos, parser);
                switch (protobufRecord.getField()) {
                    case 1:
                        // versions: plain varints, not delta encoded
                        long[] tVersion = new ProtobufPacked(protobufRecord.getBytes()).getArray();
                        version = joinArrays(version, tVersion);
                        break;
                    case 2:
                        long[] tTimestamp = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
                        timestamp = joinArrays(timestamp, tTimestamp);
                        break;
                    case 3:
                        long[] tChangeset = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
                        changeset = joinArrays(changeset, tChangeset);
                        break;
                    case 4:
                        long[] tUid = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
                        uid = joinArrays(uid, tUid);
                        break;
                    case 5:
                        long[] tUserSid = decodePackedSInt64(new ProtobufPacked(protobufRecord.getBytes()).getArray());
                        userSid = joinArrays(userSid, tUserSid);
                        break;
                    case 6:
                        // visible flags: plain varints (0/1), not delta encoded
                        long[] tVisible = new ProtobufPacked(protobufRecord.getBytes()).getArray();
                        visible = joinArrays(visible, tVisible);
                        break;
                    default: // Fall through
                }
            }
        }
        // NOTE(review): `visible == EMPTY_LONG` is a deliberate reference comparison -- joinArrays only
        // returns the EMPTY_LONG sentinel itself when the visible field was never parsed, which is how
        // "field absent (all visible)" is told apart from "field present"
        if (version.length == timestamp.length && timestamp.length == changeset.length && changeset.length == uid.length &&
                uid.length == userSid.length && (visible == EMPTY_LONG || visible.length == userSid.length)) {
            Info[] infos = new Info[version.length];
            long lastTimestamp = 0; // delta encoded
            long lastChangeset = 0; // delta encoded
            long lastUid = 0; // delta encoded,
            long lastUserSid = 0; // delta encoded, string id for username
            for (int i = 0; i < version.length; i++) {
                // Accumulate the deltas; each node's value is the running sum of all previous entries
                lastTimestamp += timestamp[i];
                lastChangeset += changeset[i];
                lastUid += uid[i];
                lastUserSid += userSid[i];
                infos[i] = new Info((int) version[i], lastTimestamp, lastChangeset, (int) lastUid, (int) lastUserSid,
                        visible == EMPTY_LONG || visible[i] == 1);
            }
            return infos;
        }
        throw new IllegalDataException("OSM PBF has mismatched DenseInfo lengths");
    }
+
    /**
     * A record class for passing PrimitiveBlock information to the PrimitiveGroup parser.
     * All fields are final, so instances are immutable once constructed.
     */
    private static final class PrimitiveBlockRecord {
        // The block string table; index 0 is an empty string, used by DenseNodes as a tag separator
        private final String[] stringTable;
        // Coordinate granularity in nanodegrees
        private final int granularity;
        // Latitude offset in nanodegrees, added to the granularity-scaled latitude (see calculateLatLon)
        private final long latOffset;
        // Longitude offset in nanodegrees, added to the granularity-scaled longitude (see calculateLatLon)
        private final long lonOffset;
        // Granularity of dates, normally milliseconds since the 1970 epoch
        private final int dateGranularity;

        /**
         * Create a new record
         *
         * @param stringTable     The string table (reminder: 0 index is empty, as it is used by DenseNode to separate node tags)
         * @param granularity     units of nanodegrees, used to store coordinates
         * @param latOffset       offset value between the output coordinates and the granularity grid in units of nanodegrees
         * @param lonOffset       offset value between the output coordinates and the granularity grid in units of nanodegrees
         * @param dateGranularity Granularity of dates, normally represented in units of milliseconds since the 1970 epoch
         */
        PrimitiveBlockRecord(String[] stringTable, int granularity, long latOffset, long lonOffset,
                             int dateGranularity) {
            this.stringTable = stringTable;
            this.granularity = granularity;
            this.latOffset = latOffset;
            this.lonOffset = lonOffset;
            this.dateGranularity = dateGranularity;
        }

    }
+}
Index: trunk/src/org/openstreetmap/josm/io/OsmReader.java
===================================================================
--- trunk/src/org/openstreetmap/josm/io/OsmReader.java	(revision 18694)
+++ trunk/src/org/openstreetmap/josm/io/OsmReader.java	(revision 18695)
@@ -502,5 +502,5 @@
     @Override
     protected DataSet doParseDataSet(InputStream source, ProgressMonitor progressMonitor) throws IllegalDataException {
-        return doParseDataSet(source, progressMonitor, ir -> {
+        return doParseDataSet(source, progressMonitor, (ParserWorker) ir -> {
             try {
                 setParser(XmlUtils.newSafeXMLInputFactory().createXMLStreamReader(ir));
