Index: src/org/wikipedia/WikipediaApp.java
===================================================================
--- src/org/wikipedia/WikipediaApp.java	(revision 33095)
+++ src/org/wikipedia/WikipediaApp.java	(working copy)
@@ -228,10 +228,35 @@
                 }))
                 .values()
                 .stream()
-                .flatMap(chunk -> getWikidataForArticles0(chunk).entrySet().stream())
+                .flatMap(chunk -> resolveWikidataItems(chunk).entrySet().stream())
                 .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
     }
 
+    /**
+     * Gets Wikidata IDs for the given article titles. For any titles that cannot be
+     * resolved directly, normalizes them and follows redirects, then retries the lookup.
+     */
+    private Map<String, String> resolveWikidataItems(List<String> articles) {
+        final Map<String, String> result = getWikidataForArticles0(articles);
+        List<String> unresolved = new ArrayList<>();
+        for (String title : articles) {
+            if (!result.containsKey(title)) {
+                unresolved.add(title);
+            }
+        }
+        if (!unresolved.isEmpty()) {
+            final Map<String, String> unresmap = resolveRedirectsForArticles(unresolved);
+            final Map<String, String> result2 = getWikidataForArticles0(new ArrayList<>(unresmap.values()));
+            for (String original : unresmap.keySet()) {
+                final String resolved = unresmap.get(original);
+                if (result2.containsKey(resolved)) {
+                    result.put(original, result2.get(resolved));
+                }
+            }
+        }
+        return result;
+    }
+
     private Map<String, String> getWikidataForArticles0(List<String> articles) {
         if (articles.isEmpty()) {
             return Collections.emptyMap();
@@ -261,6 +286,44 @@
         }
     }
 
+    /**
+     * Given a list of Wikipedia titles, returns a map from each title to its normalized
+     * form or, if the title is a redirect page, to the redirect target.
+     * TODO: also use this function to normalize existing "wikipedia" tags
+     */
+    private Map<String, String> resolveRedirectsForArticles(List<String> articles) {
+        try {
+            final String url = "https://" + wikipediaLang + ".wikipedia.org/w/api.php" +
+                    "?action=query" +
+                    "&redirects" +
+                    "&format=xml" +
+                    "&titles=" + articles.stream().map(Utils::encodeUrl).collect(Collectors.joining("|"));
+            final Map<String, String> result = new TreeMap<>(), result2 = new TreeMap<>();
+            try (final InputStream in = connect(url).getContent()) {
+                final Document xml = newDocumentBuilder().parse(in);
+
+                // Merge redirect and normalization results into one map (a normalized title chains through its redirect, if any)
+                X_PATH.evaluateNodes("//r", xml).forEach(node -> {
+                    result.put(X_PATH.evaluateString("./@from", node), X_PATH.evaluateString("./@to", node));
+                });
+                X_PATH.evaluateNodes("//n", xml).forEach(node -> {
+                    final String to = X_PATH.evaluateString("./@to", node);
+                    result.put(X_PATH.evaluateString("./@from", node), result.getOrDefault(to, to));
+                });
+            }
+            // Return only the originally requested titles, dropping the intermediate
+            // normalized-form keys added for titles that were both normalized and redirected
+            for (String title : articles) {
+                if (result.containsKey(title)) {
+                    result2.put(title, result.get(title));
+                }
+            }
+            return result2;
+        } catch (Exception ex) {
+            throw new RuntimeException(ex);
+        }
+    }
+
     public List<String> getCategoriesForPrefix(final String prefix) {
         try {
             final String url = getSiteUrl() + "/w/api.php"
