diff --git a/Cargo.lock b/Cargo.lock
index d23b6b27e21fc..8df87fadca4dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -622,9 +622,9 @@ dependencies = [
"itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -720,9 +720,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ident_case 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -731,8 +731,8 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"darling_core 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -898,9 +898,9 @@ name = "failure_derive"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1478,8 +1478,8 @@ dependencies = [
name = "malloc_size_of_derive"
version = "0.0.1"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1766,8 +1766,8 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1954,7 +1954,7 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "0.4.9"
+version = "0.4.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2005,10 +2005,10 @@ dependencies = [
[[package]]
name = "quote"
-version = "0.6.3"
+version = "0.6.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2221,9 +2221,9 @@ name = "scroll_derive"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2277,9 +2277,9 @@ name = "serde_derive"
version = "1.0.80"
source = "git+https://github.com/servo/serde?branch=deserialize_from_enums9#e0cc925c259cb74ce41377e4fe02713adfa6d836"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2390,8 +2390,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2465,9 +2465,9 @@ name = "style_derive"
version = "0.0.1"
dependencies = [
"darling 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2521,18 +2521,18 @@ name = "syn"
version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
-version = "0.15.7"
+version = "0.15.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2541,9 +2541,9 @@ name = "synstructure"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3354,11 +3354,11 @@ dependencies = [
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "77997c53ae6edd6d187fec07ec41b207063b5ee6f33680e9fa86d405cdd313d4"
-"checksum proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "cccdc7557a98fe98453030f077df7f3a042052fae465bb61d2c2c41435cfd9b6"
+"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
"checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
"checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
-"checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
+"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"
"checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
"checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd"
"checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
@@ -3406,7 +3406,7 @@ dependencies = [
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
"checksum syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4e4b5274d4a0a3d2749d5c158dc64d3403e60554dc61194648787ada5212473d"
-"checksum syn 0.15.7 (registry+https://github.com/rust-lang/crates.io-index)" = "455a6ec9b368f8c479b0ae5494d13b22dc00990d2f00d68c9dc6a2dc4f17f210"
+"checksum syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)" = "734ecc29cd36e8123850d9bf21dfd62ef8300aaa8f879aabaa899721808be37c"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum target-lexicon 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4af5e2227f0b887d591d3724b796a96eff04226104d872f5b3883fcd427d64b9"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
diff --git a/browser/base/content/tabbrowser.xml b/browser/base/content/tabbrowser.xml
index c74bcac049d2b..048eec62b43b2 100644
--- a/browser/base/content/tabbrowser.xml
+++ b/browser/base/content/tabbrowser.xml
@@ -34,7 +34,7 @@
// - for vertical orientation
// - corresponding to an overflow event that we ignored
let tabs = document.getBindingParent(this);
- if (event.originalTarget != this._scrollbox ||
+ if (event.originalTarget != this.scrollbox ||
event.detail == 0 ||
!tabs.hasAttribute("overflow")) {
return;
@@ -56,7 +56,7 @@
// Ignore overflow events:
// - from nested scrollable elements
// - for vertical orientation
- if (event.originalTarget != this._scrollbox ||
+ if (event.originalTarget != this.scrollbox ||
event.detail == 0) {
return;
}
@@ -634,7 +634,7 @@
let leftMovingTabScreenX = movingTabs[0].boxObject.screenX;
let translateX = screenX - draggedTab._dragData.screenX;
if (!pinned) {
- translateX += this.arrowScrollbox._scrollbox.scrollLeft - draggedTab._dragData.scrollX;
+ translateX += this.arrowScrollbox.scrollbox.scrollLeft - draggedTab._dragData.scrollX;
}
let leftBound = leftTab.boxObject.screenX - leftMovingTabScreenX;
let rightBound = (rightTab.boxObject.screenX + rightTab.boxObject.width) -
@@ -1555,7 +1555,7 @@
tab._dragData = {
offsetX: event.screenX - window.screenX - tabOffsetX,
offsetY: event.screenY - window.screenY,
- scrollX: this.arrowScrollbox._scrollbox.scrollLeft,
+ scrollX: this.arrowScrollbox.scrollbox.scrollLeft,
screenX: event.screenX,
movingTabs: (tab.multiselected ? gBrowser.selectedTabs : [tab])
.filter(t => t.pinned == tab.pinned),
diff --git a/browser/base/content/test/forms/browser_selectpopup.js b/browser/base/content/test/forms/browser_selectpopup.js
index 44c6fbb148905..40c771a550286 100644
--- a/browser/base/content/test/forms/browser_selectpopup.js
+++ b/browser/base/content/test/forms/browser_selectpopup.js
@@ -462,51 +462,64 @@ async function performLargePopupTests(win) {
// Check if a drag-select works and scrolls the list.
await openSelectPopup(selectPopup, "mousedown", "select", win);
- let scrollPos = selectPopup.scrollBox.scrollTop;
+ let getScrollPos = () => selectPopup.scrollBox.scrollbox.scrollTop;
+ let scrollPos = getScrollPos();
let popupRect = selectPopup.getBoundingClientRect();
// First, check that scrolling does not occur when the mouse is moved over the
// anchor button but not the popup yet.
EventUtils.synthesizeMouseAtPoint(popupRect.left + 5, popupRect.top - 10, { type: "mousemove" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position after mousemove over button should not change");
+ is(getScrollPos(), scrollPos, "scroll position after mousemove over button should not change");
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top + 10, { type: "mousemove" }, win);
// Dragging above the popup scrolls it up.
+ let scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+ () => getScrollPos() < scrollPos - 5);
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" }, win);
- ok(selectPopup.scrollBox.scrollTop < scrollPos - 5, "scroll position at drag up");
+ await scrolledPromise;
+ ok(true, "scroll position at drag up");
// Dragging below the popup scrolls it down.
- scrollPos = selectPopup.scrollBox.scrollTop;
+ scrollPos = getScrollPos();
+ scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+ () => getScrollPos() > scrollPos + 5);
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
- ok(selectPopup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down");
+ await scrolledPromise;
+ ok(true, "scroll position at drag down");
// Releasing the mouse button and moving the mouse does not change the scroll position.
- scrollPos = selectPopup.scrollBox.scrollTop;
+ scrollPos = getScrollPos();
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup should not change");
+ is(getScrollPos(), scrollPos, "scroll position at mouseup should not change");
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+ is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
// Now check dragging with a mousedown on an item
let menuRect = selectPopup.children[51].getBoundingClientRect();
EventUtils.synthesizeMouseAtPoint(menuRect.left + 5, menuRect.top + 5, { type: "mousedown" }, win);
// Dragging below the popup scrolls it down.
+ scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+ () => getScrollPos() > scrollPos + 5);
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
- ok(selectPopup.scrollBox.scrollTop > scrollPos + 5, "scroll position at drag down from option");
+ await scrolledPromise;
+ ok(true, "scroll position at drag down from option");
// Dragging above the popup scrolls it up.
+ scrolledPromise = BrowserTestUtils.waitForEvent(selectPopup, "scroll", false,
+ () => getScrollPos() < scrollPos - 5);
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.top - 20, { type: "mousemove" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at drag up from option");
+ await scrolledPromise;
+ ok(true, "scroll position at drag up from option");
- scrollPos = selectPopup.scrollBox.scrollTop;
+ scrollPos = getScrollPos();
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 25, { type: "mouseup" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mouseup from option should not change");
+ is(getScrollPos(), scrollPos, "scroll position at mouseup from option should not change");
EventUtils.synthesizeMouseAtPoint(popupRect.left + 20, popupRect.bottom + 20, { type: "mousemove" }, win);
- is(selectPopup.scrollBox.scrollTop, scrollPos, "scroll position at mousemove after mouseup should not change");
+ is(getScrollPos(), scrollPos, "scroll position at mousemove after mouseup should not change");
await hideSelectPopup(selectPopup, "escape", win);
diff --git a/browser/base/content/test/tabs/browser_overflowScroll.js b/browser/base/content/test/tabs/browser_overflowScroll.js
index 37cb9ac12fdb6..1103b401431f4 100644
--- a/browser/base/content/test/tabs/browser_overflowScroll.js
+++ b/browser/base/content/test/tabs/browser_overflowScroll.js
@@ -8,7 +8,7 @@ requestLongerTimeout(2);
*/
add_task(async function() {
let arrowScrollbox = gBrowser.tabContainer.arrowScrollbox;
- let scrollbox = arrowScrollbox._scrollbox;
+ let scrollbox = arrowScrollbox.scrollbox;
let originalSmoothScroll = arrowScrollbox.smoothScroll;
let tabs = gBrowser.tabs;
let tabMinWidth = parseInt(getComputedStyle(gBrowser.selectedTab, null).minWidth);
diff --git a/dom/base/nsImageLoadingContent.h b/dom/base/nsImageLoadingContent.h
index d590421d6658d..3424eb594e4f2 100644
--- a/dom/base/nsImageLoadingContent.h
+++ b/dom/base/nsImageLoadingContent.h
@@ -200,8 +200,6 @@ class nsImageLoadingContent : public nsIImageLoadingContent {
*/
void DestroyImageLoadingContent();
- void ClearBrokenState() { mBroken = false; }
-
/**
* Returns the CORS mode that will be used for all future image loads. The
* default implementation returns CORS_NONE unconditionally.
diff --git a/dom/html/HTMLImageElement.cpp b/dom/html/HTMLImageElement.cpp
index 7abef966389bc..0cab0f2b470c6 100644
--- a/dom/html/HTMLImageElement.cpp
+++ b/dom/html/HTMLImageElement.cpp
@@ -525,11 +525,6 @@ nsresult HTMLImageElement::BindToTree(Document* aDocument, nsIContent* aParent,
// initaiated by a user interaction.
mUseUrgentStartForChannel = EventStateManager::IsHandlingUserInput();
- // FIXME: Bug 660963 it would be nice if we could just have
- // ClearBrokenState update our state and do it fast...
- ClearBrokenState();
- RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
-
// We still act synchronously for the non-responsive case (Bug
// 1076583), but still need to delay if it is unsafe to run
// script.
diff --git a/dom/html/HTMLInputElement.cpp b/dom/html/HTMLInputElement.cpp
index 57da3964924da..6b6b23ca28707 100644
--- a/dom/html/HTMLInputElement.cpp
+++ b/dom/html/HTMLInputElement.cpp
@@ -4339,10 +4339,6 @@ nsresult HTMLInputElement::BindToTree(Document* aDocument, nsIContent* aParent,
// initaiated by a user interaction.
mUseUrgentStartForChannel = EventStateManager::IsHandlingUserInput();
- // FIXME: Bug 660963 it would be nice if we could just have
- // ClearBrokenState update our state and do it fast...
- ClearBrokenState();
- RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
nsContentUtils::AddScriptRunner(
NewRunnableMethod("dom::HTMLInputElement::MaybeLoadImage", this,
&HTMLInputElement::MaybeLoadImage));
diff --git a/dom/html/reftests/bug1512297-ref.html b/dom/html/reftests/bug1512297-ref.html
new file mode 100644
index 0000000000000..45026e86cce7a
--- /dev/null
+++ b/dom/html/reftests/bug1512297-ref.html
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/dom/html/reftests/bug1512297.html b/dom/html/reftests/bug1512297.html
new file mode 100644
index 0000000000000..55d8d4564f701
--- /dev/null
+++ b/dom/html/reftests/bug1512297.html
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
diff --git a/dom/html/reftests/reftest.list b/dom/html/reftests/reftest.list
index a827b47dc46df..602e2e0267fe4 100644
--- a/dom/html/reftests/reftest.list
+++ b/dom/html/reftests/reftest.list
@@ -64,6 +64,8 @@ pref(permissions.default.image,2) HTTP == bug1196784-with-srcset.html bug1196784
# Test video with rotation information can be rotated.
== bug1228601-video-rotation-90.html bug1228601-video-rotated-ref.html
+== bug1512297.html bug1512297-ref.html
+
# Test that dynamically setting body margin attributes updates style appropriately
== body-topmargin-dynamic.html body-topmargin-ref.html
diff --git a/dom/svg/SVGFEImageElement.cpp b/dom/svg/SVGFEImageElement.cpp
index f0e218c27e544..835d70365bcca 100644
--- a/dom/svg/SVGFEImageElement.cpp
+++ b/dom/svg/SVGFEImageElement.cpp
@@ -142,10 +142,6 @@ nsresult SVGFEImageElement::BindToTree(Document* aDocument, nsIContent* aParent,
if (mStringAttributes[HREF].IsExplicitlySet() ||
mStringAttributes[XLINK_HREF].IsExplicitlySet()) {
- // FIXME: Bug 660963 it would be nice if we could just have
- // ClearBrokenState update our state and do it fast...
- ClearBrokenState();
- RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
nsContentUtils::AddScriptRunner(
NewRunnableMethod("dom::SVGFEImageElement::MaybeLoadSVGImage", this,
&SVGFEImageElement::MaybeLoadSVGImage));
diff --git a/dom/svg/SVGImageElement.cpp b/dom/svg/SVGImageElement.cpp
index e3ae7f94de24f..fe294516426e2 100644
--- a/dom/svg/SVGImageElement.cpp
+++ b/dom/svg/SVGImageElement.cpp
@@ -190,10 +190,6 @@ nsresult SVGImageElement::BindToTree(Document* aDocument, nsIContent* aParent,
if (mStringAttributes[HREF].IsExplicitlySet() ||
mStringAttributes[XLINK_HREF].IsExplicitlySet()) {
- // FIXME: Bug 660963 it would be nice if we could just have
- // ClearBrokenState update our state and do it fast...
- ClearBrokenState();
- RemoveStatesSilently(NS_EVENT_STATE_BROKEN);
nsContentUtils::AddScriptRunner(
NewRunnableMethod("dom::SVGImageElement::MaybeLoadSVGImage", this,
&SVGImageElement::MaybeLoadSVGImage));
diff --git a/layout/xul/nsMenuFrame.cpp b/layout/xul/nsMenuFrame.cpp
index 1687d2a569a2b..f509dd1e4b425 100644
--- a/layout/xul/nsMenuFrame.cpp
+++ b/layout/xul/nsMenuFrame.cpp
@@ -1208,8 +1208,7 @@ bool nsMenuFrame::SizeToPopup(nsBoxLayoutState& aState, nsSize& aSize) {
// if there is a scroll frame, add the desired width of the scrollbar as
// well
- nsIScrollableFrame* scrollFrame =
- do_QueryFrame(popupFrame->PrincipalChildList().FirstChild());
+ nsIScrollableFrame* scrollFrame = popupFrame->GetScrollFrame(popupFrame);
nscoord scrollbarWidth = 0;
if (scrollFrame) {
scrollbarWidth =
diff --git a/layout/xul/nsMenuPopupFrame.cpp b/layout/xul/nsMenuPopupFrame.cpp
index 8e5b29b849766..9aeb5faf8280e 100644
--- a/layout/xul/nsMenuPopupFrame.cpp
+++ b/layout/xul/nsMenuPopupFrame.cpp
@@ -1048,8 +1048,7 @@ nsPoint nsMenuPopupFrame::AdjustPositionForAnchorAlign(nsRect& anchorRect,
nsIFrame* selectedItemFrame = GetSelectedItemForAlignment();
if (selectedItemFrame) {
int32_t scrolly = 0;
- nsIScrollableFrame* scrollframe =
- do_QueryFrame(nsBox::GetChildXULBox(this));
+ nsIScrollableFrame* scrollframe = GetScrollFrame(this);
if (scrollframe) {
scrolly = scrollframe->GetScrollPosition().y;
}
diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
index 239a145220d44..f87951f773e94 100644
--- a/third_party/rust/proc-macro2/.cargo-checksum.json
+++ b/third_party/rust/proc-macro2/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"d271b6306e4b9e51b642d67ca9c35f6f32d582eb549da89085799aadc9bcc626","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"261fb7bbe050bbff8a8e33da68926b44cd1bbd2b1e8b655d19ae681b8fff3c6e","src/lib.rs":"b7483dd58c6defa21d68d163eeae8d03029fed83e96071edaacae3f694f2bd04","src/stable.rs":"c325eadc1f0a78c55117589e6bacb72dd295ccd02cb3e2dea13e1381ad2e972e","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"a18f0fdb7c0670b73663dc708962959176ae5b1c2623e7f36c3767ed9c3bcfef","tests/test.rs":"428f4298e16a23db8f8fbb6101a30e993f08dc0befa2d95439dcefb364d7a7cf"},"package":"cccdc7557a98fe98453030f077df7f3a042052fae465bb61d2c2c41435cfd9b6"}
\ No newline at end of file
+{"files":{"Cargo.toml":"f020c87cba7dd2260861239307b2cb93e16c2bed6e2ef6c9178642b1dfcc43a3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"538fd635d385b6a90ef4cc1e361aad717162a139e932a6192212cad8407aa8e1","build.rs":"7698abdd3087e0f3308916c37ade3349b6b000165186b80913013af18d36ecb6","src/lib.rs":"c5c276236d828189a5151c890a66f2b7d1c02beca98f08f2d9c01166df441eb2","src/stable.rs":"a1f29e850e5fc4c602ee1204847124e266087175695d77ec448016db910acb6b","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"0b7f86862d8254104330d14837ea6ec89e7b3bf2ffe910b73629269f2bc282de","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"427821bab498926aa56bfcea7d28c36fb24a7d63d7f59d3e7e097bcfc77fe95b"},"package":"77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"}
\ No newline at end of file
diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
index 009134c0e0c07..778fdb8021632 100644
--- a/third_party/rust/proc-macro2/Cargo.toml
+++ b/third_party/rust/proc-macro2/Cargo.toml
@@ -12,8 +12,9 @@
[package]
name = "proc-macro2"
-version = "0.4.9"
+version = "0.4.24"
authors = ["Alex Crichton "]
+build = "build.rs"
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
homepage = "https://github.com/alexcrichton/proc-macro2"
documentation = "https://docs.rs/proc-macro2"
@@ -22,14 +23,16 @@ keywords = ["macros"]
license = "MIT/Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[package.metadata.docs.rs]
+rustc-args = ["--cfg", "procmacro2_semver_exempt"]
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
-
-[lib]
-doctest = false
[dependencies.unicode-xid]
version = "0.1"
+[dev-dependencies.quote]
+version = "0.6"
[features]
default = ["proc-macro"]
nightly = ["proc-macro"]
proc-macro = []
+[badges.travis-ci]
+repository = "alexcrichton/proc-macro2"
diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
index de265776664b6..f11c4dca65ae5 100644
--- a/third_party/rust/proc-macro2/README.md
+++ b/third_party/rust/proc-macro2/README.md
@@ -5,25 +5,20 @@
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
A small shim over the `proc_macro` crate in the compiler intended to multiplex
-the current stable interface (as of 2017-07-05) and the [upcoming richer
-interface][upcoming].
+the stable interface as of 1.15.0 and the interface as of 1.30.0.
-[upcoming]: https://github.com/rust-lang/rust/pull/40939
-
-The upcoming support has features like:
+New features added in Rust 1.30.0 include:
* Span information on tokens
* No need to go in/out through strings
* Structured input/output
-The hope is that libraries ported to `proc_macro2` will be trivial to port to
-the real `proc_macro` crate once the support on nightly is stabilized.
+Libraries ported to `proc_macro2` can retain support for older compilers while
+continuing to get all the nice benefits of using a 1.30.0+ compiler.
## Usage
-This crate by default compiles on the stable version of the compiler. It only
-uses the stable surface area of the `proc_macro` crate upstream in the compiler
-itself. Usage is done via:
+This crate compiles on all 1.15.0+ stable compilers and usage looks like:
```toml
[dependencies]
@@ -48,23 +43,13 @@ pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
}
```
-If you'd like you can enable the `nightly` feature in this crate. This will
-cause it to compile against the **unstable and nightly-only** features of the
-`proc_macro` crate. This in turn requires a nightly compiler. This should help
-preserve span information, however, coming in from the compiler itself.
-
-You can enable this feature via:
-
-```toml
-[dependencies]
-proc-macro2 = { version = "0.4", features = ["nightly"] }
-```
-
+The 1.30.0 compiler is automatically detected and its interfaces are used when
+available.
## Unstable Features
`proc-macro2` supports exporting some methods from `proc_macro` which are
-currently highly unstable, and may not be stabilized in the first pass of
+currently highly unstable, and are not stabilized in the first pass of
`proc_macro` stabilizations. These features are not exported by default. Minor
versions of `proc-macro2` may make breaking changes to them at any time.
@@ -79,7 +64,6 @@ Note that this must not only be done for your crate, but for any crate that
depends on your crate. This infectious nature is intentional, as it serves as a
reminder that you are outside of the normal semver guarantees.
-
# License
This project is licensed under either of
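The README's derive-macro walkthrough above boils down to a small round-trip pattern. Below is a minimal, illustrative sketch of that pattern — it is not part of this patch, the derive name `MyDerive` and the `expand` helper are made up, and it assumes a crate with `proc-macro = true` in its `[lib]` section:

```rust
// Hypothetical derive crate root (lib.rs); not part of the patch.
extern crate proc_macro;
extern crate proc_macro2;

use proc_macro2::{Ident, Span, TokenStream};

#[proc_macro_derive(MyDerive)]
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Convert into proc-macro2 types so the logic can be unit-tested outside rustc.
    let input: TokenStream = input.into();
    let output = expand(input);
    // Convert back into the compiler's TokenStream at the boundary.
    output.into()
}

fn expand(input: TokenStream) -> TokenStream {
    // A real macro would parse `input` (typically with syn 0.15); here we only
    // demonstrate constructing a token and echoing the input back.
    let _ident = Ident::new("placeholder", Span::call_site());
    input
}
```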
diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
new file mode 100644
index 0000000000000..cd6df2379e6f3
--- /dev/null
+++ b/third_party/rust/proc-macro2/build.rs
@@ -0,0 +1,67 @@
+use std::env;
+use std::process::Command;
+use std::str;
+
+fn main() {
+ println!("cargo:rerun-if-changed=build.rs");
+
+ let target = env::var("TARGET").unwrap();
+
+ let minor = match rustc_minor_version() {
+ Some(n) => n,
+ None => return,
+ };
+
+ if minor >= 26 {
+ println!("cargo:rustc-cfg=u128");
+ }
+
+ if !enable_use_proc_macro(&target) {
+ return;
+ }
+ println!("cargo:rustc-cfg=use_proc_macro");
+
+ // Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
+ if (minor >= 29 && !cfg!(procmacro2_semver_exempt)) || cfg!(feature = "nightly") {
+ println!("cargo:rustc-cfg=wrap_proc_macro");
+
+ if cfg!(procmacro2_semver_exempt) {
+ println!("cargo:rustc-cfg=super_unstable");
+ // https://github.com/alexcrichton/proc-macro2/issues/147
+ println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+ }
+ }
+
+ if minor == 29 {
+ println!("cargo:rustc-cfg=slow_extend");
+ }
+}
+
+fn enable_use_proc_macro(target: &str) -> bool {
+ // wasm targets don't have the `proc_macro` crate, disable this feature.
+ if target.contains("wasm32") {
+ return false;
+ }
+
+ // Otherwise, only enable it if our feature is actually enabled.
+ cfg!(feature = "proc-macro")
+}
+
+fn rustc_minor_version() -> Option<u32> {
+ macro_rules! otry {
+ ($e:expr) => {
+ match $e {
+ Some(e) => e,
+ None => return None,
+ }
+ };
+ }
+ let rustc = otry!(env::var_os("RUSTC"));
+ let output = otry!(Command::new(rustc).arg("--version").output().ok());
+ let version = otry!(str::from_utf8(&output.stdout).ok());
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+ otry!(pieces.next()).parse().ok()
+}
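The build script above probes the rustc version and emits `cargo:rustc-cfg` flags (`u128`, `use_proc_macro`, `wrap_proc_macro`, `slow_extend`). A minimal, illustrative sketch of how crate code consumes such a flag — not part of the patch, and the function name is hypothetical:

```rust
// Compiled only when build.rs detected rustc >= 1.26 and printed `cargo:rustc-cfg=u128`.
#[cfg(u128)]
pub fn largest_unsigned() -> u128 {
    u128::max_value()
}

// Fallback item used on older compilers, where the `u128` cfg is never set.
#[cfg(not(u128))]
pub fn largest_unsigned() -> u64 {
    u64::max_value()
}

fn main() {
    println!("largest unsigned value: {}", largest_unsigned());
}
```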
diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
index 71552a299caca..db51272275abc 100644
--- a/third_party/rust/proc-macro2/src/lib.rs
+++ b/third_party/rust/proc-macro2/src/lib.rs
@@ -43,10 +43,13 @@
//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.9")]
-#![cfg_attr(feature = "nightly", feature(proc_macro_raw_ident, proc_macro_span))]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.24")]
+#![cfg_attr(
+ super_unstable,
+ feature(proc_macro_raw_ident, proc_macro_span, proc_macro_def_site)
+)]
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
extern crate proc_macro;
extern crate unicode_xid;
@@ -55,6 +58,8 @@ use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::marker;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::PathBuf;
use std::rc::Rc;
use std::str::FromStr;
@@ -62,10 +67,10 @@ use std::str::FromStr;
mod strnom;
mod stable;
-#[cfg(not(feature = "nightly"))]
+#[cfg(not(wrap_proc_macro))]
use stable as imp;
#[path = "unstable.rs"]
-#[cfg(feature = "nightly")]
+#[cfg(wrap_proc_macro)]
mod imp;
/// An abstract stream of tokens, or more concretely a sequence of token trees.
@@ -146,14 +151,14 @@ impl FromStr for TokenStream {
}
}
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream::_new(inner.into())
}
}
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
impl From for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.inner.into()
@@ -166,12 +171,24 @@ impl Extend<TokenTree> for TokenStream {
}
}
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ self.inner
+ .extend(streams.into_iter().map(|stream| stream.inner))
+ }
+}
+
/// Collects a number of token trees into a single stream.
impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
TokenStream::_new(streams.into_iter().collect())
}
}
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+ }
+}
/// Prints the token stream as a string that is supposed to be losslessly
/// convertible back into the same token stream (modulo spans), except for
@@ -196,19 +213,25 @@ impl fmt::Debug for LexError {
}
}
-// Returned by reference, so we can't easily wrap it.
-#[cfg(procmacro2_semver_exempt)]
-pub use imp::FileName;
-
/// The source file of a given `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile(imp::SourceFile);
+pub struct SourceFile {
+ inner: imp::SourceFile,
+ _marker: marker::PhantomData<Rc<()>>,
+}
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner: inner,
+ _marker: marker::PhantomData,
+ }
+ }
+
/// Get the path to this source file.
///
/// ### Note
@@ -222,28 +245,21 @@ impl SourceFile {
/// may not actually be valid.
///
/// [`is_real`]: #method.is_real
- pub fn path(&self) -> &FileName {
- self.0.path()
+ pub fn path(&self) -> PathBuf {
+ self.inner.path()
}
/// Returns `true` if this source file is a real source file, and not
/// generated by an external macro's expansion.
pub fn is_real(&self) -> bool {
- self.0.is_real()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
- fn as_ref(&self) -> &FileName {
- self.0.path()
+ self.inner.is_real()
}
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.0.fmt(f)
+ self.inner.fmt(f)
}
}
@@ -318,7 +334,8 @@ impl Span {
}
/// This method is only available when the `"nightly"` feature is enabled.
- #[cfg(all(feature = "nightly", feature = "proc-macro"))]
+ #[doc(hidden)]
+ #[cfg(any(feature = "nightly", super_unstable))]
pub fn unstable(self) -> proc_macro::Span {
self.inner.unstable()
}
@@ -328,7 +345,7 @@ impl Span {
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
- SourceFile(self.inner.source_file())
+ SourceFile::_new(self.inner.source_file())
}
/// Get the starting line/column in the source file for this span.
@@ -486,9 +503,7 @@ impl fmt::Debug for TokenTree {
/// `Delimiter`s.
#[derive(Clone)]
pub struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: Span,
+ inner: imp::Group,
}
/// Describes how a sequence of token trees is delimited.
@@ -511,6 +526,18 @@ pub enum Delimiter {
}
impl Group {
+ fn _new(inner: imp::Group) -> Self {
+ Group {
+ inner: inner,
+ }
+ }
+
+ fn _new_stable(inner: stable::Group) -> Self {
+ Group {
+ inner: inner.into(),
+ }
+ }
+
/// Creates a new `Group` with the given delimiter and token stream.
///
/// This constructor will set the span for this group to
@@ -518,15 +545,13 @@ impl Group {
/// method below.
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
Group {
- delimiter: delimiter,
- stream: stream,
- span: Span::call_site(),
+ inner: imp::Group::new(delimiter, stream.inner),
}
}
/// Returns the delimiter of this `Group`
pub fn delimiter(&self) -> Delimiter {
- self.delimiter
+ self.inner.delimiter()
}
/// Returns the `TokenStream` of tokens that are delimited in this `Group`.
@@ -534,13 +559,40 @@ impl Group {
/// Note that the returned token stream does not include the delimiter
/// returned above.
pub fn stream(&self) -> TokenStream {
- self.stream.clone()
+ TokenStream::_new(self.inner.stream())
}
/// Returns the span for the delimiters of this token stream, spanning the
/// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
pub fn span(&self) -> Span {
- self.span
+ Span::_new(self.inner.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn span_open(&self) -> Span {
+ Span::_new(self.inner.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn span_close(&self) -> Span {
+ Span::_new(self.inner.span_close())
}
/// Configures the span for this `Group`'s delimiters, but not its internal
@@ -550,7 +602,7 @@ impl Group {
/// by this group, but rather it will only set the span of the delimiter
/// tokens at the level of the `Group`.
pub fn set_span(&mut self, span: Span) {
- self.span = span;
+ self.inner.set_span(span.inner)
}
}
@@ -558,30 +610,14 @@ impl Group {
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
impl fmt::Display for Group {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let (left, right) = match self.delimiter {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
-
- f.write_str(left)?;
- self.stream.fmt(f)?;
- f.write_str(right)?;
-
- Ok(())
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.inner, formatter)
}
}
impl fmt::Debug for Group {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Group");
- debug.field("delimiter", &self.delimiter);
- debug.field("stream", &self.stream);
- #[cfg(procmacro2_semver_exempt)]
- debug.field("span", &self.span);
- debug.finish()
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&self.inner, formatter)
}
}
@@ -679,11 +715,11 @@ impl fmt::Debug for Punct {
/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
///
/// An identifier constructed with `Ident::new` is permitted to be a Rust
-/// keyword, though parsing one through its [`Synom`] implementation rejects
-/// Rust keywords. Use `call!(Ident::parse_any)` when parsing to match the
+/// keyword, though parsing one through its [`Parse`] implementation rejects
+/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
/// behaviour of `Ident::new`.
///
-/// [`Synom`]: https://docs.rs/syn/0.14/syn/synom/trait.Synom.html
+/// [`Parse`]: https://docs.rs/syn/0.15/syn/parse/trait.Parse.html
///
/// # Examples
///
@@ -811,7 +847,7 @@ impl Ident {
impl PartialEq for Ident {
fn eq(&self, other: &Ident) -> bool {
- self.to_string() == other.to_string()
+ self.inner == other.inner
}
}
@@ -820,7 +856,7 @@ where
T: ?Sized + AsRef<str>,
{
fn eq(&self, other: &T) -> bool {
- self.to_string() == other.as_ref()
+ self.inner == other
}
}
@@ -938,6 +974,12 @@ impl Literal {
isize_suffixed => isize,
}
+ #[cfg(u128)]
+ suffixed_int_literals! {
+ u128_suffixed => u128,
+ i128_suffixed => i128,
+ }
+
unsuffixed_int_literals! {
u8_unsuffixed => u8,
u16_unsuffixed => u16,
@@ -951,6 +993,12 @@ impl Literal {
isize_unsuffixed => isize,
}
+ #[cfg(u128)]
+ unsuffixed_int_literals! {
+ u128_unsuffixed => u128,
+ i128_unsuffixed => i128,
+ }
+
pub fn f64_unsuffixed(f: f64) -> Literal {
assert!(f.is_finite());
Literal::_new(imp::Literal::f64_unsuffixed(f))
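The hunks above add `u128`/`i128` variants to the existing suffixed and unsuffixed `Literal` constructors, gated on the `u128` cfg from build.rs. A short, illustrative sketch of how these constructors behave (not part of the patch):

```rust
extern crate proc_macro2;

use proc_macro2::Literal;

fn main() {
    // Suffixed constructors keep the type in the token: prints "10u8".
    println!("{}", Literal::u8_suffixed(10));
    // Unsuffixed constructors leave the type to inference: prints "10".
    println!("{}", Literal::u8_unsuffixed(10));
    // Float constructors require finite values; prints "1.5".
    println!("{}", Literal::f64_unsuffixed(1.5));
}
```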
diff --git a/third_party/rust/proc-macro2/src/stable.rs b/third_party/rust/proc-macro2/src/stable.rs
index 73912f5884308..baeed69af6c86 100644
--- a/third_party/rust/proc-macro2/src/stable.rs
+++ b/third_party/rust/proc-macro2/src/stable.rs
@@ -6,13 +6,16 @@ use std::cell::RefCell;
use std::cmp;
use std::fmt;
use std::iter;
+#[cfg(procmacro2_semver_exempt)]
+use std::path::Path;
+use std::path::PathBuf;
use std::str::FromStr;
use std::vec;
use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
use unicode_xid::UnicodeXID;
-use {Delimiter, Group, Punct, Spacing, TokenTree};
+use {Delimiter, Punct, Spacing, TokenTree};
#[derive(Clone)]
pub struct TokenStream {
@@ -116,7 +119,7 @@ impl fmt::Debug for TokenStream {
}
}
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
impl From<::proc_macro::TokenStream> for TokenStream {
fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
inner
@@ -126,7 +129,7 @@ impl From<::proc_macro::TokenStream> for TokenStream {
}
}
-#[cfg(feature = "proc-macro")]
+#[cfg(use_proc_macro)]
impl From<TokenStream> for ::proc_macro::TokenStream {
fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
inner
@@ -154,12 +157,31 @@ impl iter::FromIterator<TokenTree> for TokenStream {
}
}
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+ for stream in streams.into_iter() {
+ v.extend(stream.inner);
+ }
+
+ TokenStream { inner: v }
+ }
+}
+
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
self.inner.extend(streams);
}
}
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ self.inner
+ .extend(streams.into_iter().flat_map(|stream| stream));
+ }
+}
+
pub type TokenTreeIter = vec::IntoIter<TokenTree>;
impl IntoIterator for TokenStream {
@@ -171,29 +193,15 @@ impl IntoIterator for TokenStream {
}
}
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct FileName(String);
-
-#[allow(dead_code)]
-pub fn file_name(s: String) -> FileName {
- FileName(s)
-}
-
-impl fmt::Display for FileName {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.0.fmt(f)
- }
-}
-
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile {
- name: FileName,
+ path: PathBuf,
}
impl SourceFile {
/// Get the path to this source file as a string.
- pub fn path(&self) -> &FileName {
- &self.name
+ pub fn path(&self) -> PathBuf {
+ self.path.clone()
}
pub fn is_real(&self) -> bool {
@@ -202,12 +210,6 @@ impl SourceFile {
}
}
-impl AsRef<FileName> for SourceFile {
- fn as_ref(&self) -> &FileName {
- self.path()
- }
-}
-
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("SourceFile")
@@ -363,7 +365,7 @@ impl Span {
let cm = cm.borrow();
let fi = cm.fileinfo(*self);
SourceFile {
- name: FileName(fi.name.clone()),
+ path: Path::new(&fi.name).to_owned(),
}
})
}
@@ -412,6 +414,75 @@ impl fmt::Debug for Span {
}
}
+#[derive(Clone)]
+pub struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+}
+
+impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group {
+ delimiter: delimiter,
+ stream: stream,
+ span: Span::call_site(),
+ }
+ }
+
+ pub fn delimiter(&self) -> Delimiter {
+ self.delimiter
+ }
+
+ pub fn stream(&self) -> TokenStream {
+ self.stream.clone()
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn span_open(&self) -> Span {
+ self.span
+ }
+
+ pub fn span_close(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let (left, right) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Brace => ("{", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+ f.write_str(left)?;
+ self.stream.fmt(f)?;
+ f.write_str(right)?;
+
+ Ok(())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+ #[cfg(procmacro2_semver_exempt)]
+ debug.field("span", &self.span);
+ debug.finish()
+ }
+}
+
#[derive(Clone)]
pub struct Ident {
sym: String,
@@ -493,6 +564,26 @@ fn validate_term(string: &str) {
}
}
+impl PartialEq for Ident {
+ fn eq(&self, other: &Ident) -> bool {
+ self.sym == other.sym && self.raw == other.raw
+ }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+ T: ?Sized + AsRef<str>,
+{
+ fn eq(&self, other: &T) -> bool {
+ let other = other.as_ref();
+ if self.raw {
+ other.starts_with("r#") && self.sym == other[2..]
+ } else {
+ self.sym == other
+ }
+ }
+}
+
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.raw {
@@ -570,6 +661,12 @@ impl Literal {
f64_suffixed => f64,
}
+ #[cfg(u128)]
+ suffixed_numbers! {
+ u128_suffixed => u128,
+ i128_suffixed => i128,
+ }
+
unsuffixed_numbers! {
u8_unsuffixed => u8,
u16_unsuffixed => u16,
@@ -583,6 +680,12 @@ impl Literal {
isize_unsuffixed => isize,
}
+ #[cfg(u128)]
+ unsuffixed_numbers! {
+ u128_unsuffixed => u128,
+ i128_unsuffixed => i128,
+ }
+
pub fn f32_unsuffixed(f: f32) -> Literal {
let mut s = f.to_string();
if !s.contains(".") {
@@ -708,7 +811,7 @@ fn token_tree(input: Cursor) -> PResult<TokenTree> {
}
named!(token_kind -> TokenTree, alt!(
- map!(group, TokenTree::Group)
+ map!(group, |g| TokenTree::Group(::Group::_new_stable(g)))
|
map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
|
@@ -722,19 +825,19 @@ named!(group -> Group, alt!(
punct!("("),
token_stream,
punct!(")")
- ) => { |ts| Group::new(Delimiter::Parenthesis, ::TokenStream::_new_stable(ts)) }
+ ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
|
delimited!(
punct!("["),
token_stream,
punct!("]")
- ) => { |ts| Group::new(Delimiter::Bracket, ::TokenStream::_new_stable(ts)) }
+ ) => { |ts| Group::new(Delimiter::Bracket, ts) }
|
delimited!(
punct!("{"),
token_stream,
punct!("}")
- ) => { |ts| Group::new(Delimiter::Brace, ::TokenStream::_new_stable(ts)) }
+ ) => { |ts| Group::new(Delimiter::Brace, ts) }
));
fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
@@ -1249,7 +1352,8 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
for tt in stream.iter_mut() {
tt.set_span(span);
}
- trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
+ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+ trees.push(::Group::_new_stable(group).into());
for tt in trees.iter_mut() {
tt.set_span(span);
}
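The stable backend now carries its own `Group` with per-delimiter spans, mirrored by the wrapper type in lib.rs. A small, illustrative sketch of the public `Group` API exercised from outside a macro (not part of the patch):

```rust
extern crate proc_macro2;

use std::str::FromStr;
use proc_macro2::{Delimiter, Group, Span, TokenStream};

fn main() {
    let stream = TokenStream::from_str("a + b").expect("valid token stream");
    let mut group = Group::new(Delimiter::Bracket, stream);

    assert_eq!(group.delimiter(), Delimiter::Bracket);
    // Display wraps the inner stream in the chosen delimiters, e.g. "[a + b]"
    // (exact whitespace can differ between the stable and nightly backends).
    println!("{}", group);

    // The delimiter span can be replaced wholesale; on the stable backend this
    // just updates the stored `span` field shown above.
    group.set_span(Span::call_site());
}
```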
diff --git a/third_party/rust/proc-macro2/src/unstable.rs b/third_party/rust/proc-macro2/src/unstable.rs
index ca13edfded270..c4cf11a8da5f1 100644
--- a/third_party/rust/proc-macro2/src/unstable.rs
+++ b/third_party/rust/proc-macro2/src/unstable.rs
@@ -1,14 +1,16 @@
-#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
+#![cfg_attr(not(super_unstable), allow(dead_code))]
use std::fmt;
use std::iter;
-use std::panic;
+use std::panic::{self, PanicInfo};
+#[cfg(super_unstable)]
+use std::path::PathBuf;
use std::str::FromStr;
use proc_macro;
use stable;
-use {Delimiter, Group, Punct, Spacing, TokenTree};
+use {Delimiter, Punct, Spacing, TokenTree};
#[derive(Clone)]
pub enum TokenStream {
@@ -23,16 +25,59 @@ pub enum LexError {
fn nightly_works() -> bool {
use std::sync::atomic::*;
+ use std::sync::Once;
+
static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
+ static INIT: Once = Once::new();
match WORKS.load(Ordering::SeqCst) {
1 => return false,
2 => return true,
_ => {}
}
- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
- WORKS.store(works as usize + 1, Ordering::SeqCst);
- works
+
+ // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+ // then use catch_unwind to determine whether the compiler's proc_macro is
+ // working. When proc-macro2 is used from outside of a procedural macro all
+ // of the proc_macro crate's APIs currently panic.
+ //
+ // The Once is to prevent the possibility of this ordering:
+ //
+ // thread 1 calls take_hook, gets the user's original hook
+ // thread 1 calls set_hook with the null hook
+ // thread 2 calls take_hook, thinks null hook is the original hook
+ // thread 2 calls set_hook with the null hook
+ // thread 1 calls set_hook with the actual original hook
+ // thread 2 calls set_hook with what it thinks is the original hook
+ //
+ // in which the user's hook has been lost.
+ //
+ // There is still a race condition where a panic in a different thread can
+ // happen during the interval that the user's original panic hook is
+ // unregistered such that their hook is incorrectly not called. This is
+ // sufficiently unlikely and less bad than printing panic messages to stderr
+ // on correct use of this crate. Maybe there is a libstd feature request
+ // here. For now, if a user needs to guarantee that this failure mode does
+ // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+ // the main thread before launching any other threads.
+ INIT.call_once(|| {
+ type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;
+
+ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+ let sanity_check = &*null_hook as *const PanicHook;
+ let original_hook = panic::take_hook();
+ panic::set_hook(null_hook);
+
+ let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+ WORKS.store(works as usize + 1, Ordering::SeqCst);
+
+ let hopefully_null_hook = panic::take_hook();
+ panic::set_hook(original_hook);
+ if sanity_check != &*hopefully_null_hook {
+ panic!("observed race condition in proc_macro2::nightly_works");
+ }
+ });
+ nightly_works()
}
fn mismatch() -> ! {
@@ -61,6 +106,13 @@ impl TokenStream {
TokenStream::Stable(_) => mismatch(),
}
}
+
+ fn unwrap_stable(self) -> stable::TokenStream {
+ match self {
+ TokenStream::Nightly(_) => mismatch(),
+ TokenStream::Stable(s) => s,
+ }
+ }
}
impl FromStr for TokenStream {
@@ -111,18 +163,7 @@ impl From<TokenTree> for TokenStream {
return TokenStream::Stable(token.into());
}
let tt: proc_macro::TokenTree = match token {
- TokenTree::Group(tt) => {
- let delim = match tt.delimiter() {
- Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
- Delimiter::Bracket => proc_macro::Delimiter::Bracket,
- Delimiter::Brace => proc_macro::Delimiter::Brace,
- Delimiter::None => proc_macro::Delimiter::None,
- };
- let span = tt.span();
- let mut group = proc_macro::Group::new(delim, tt.stream.inner.unwrap_nightly());
- group.set_span(span.inner.unwrap_nightly());
- group.into()
- }
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
TokenTree::Punct(tt) => {
let spacing = match tt.spacing() {
Spacing::Joint => proc_macro::Spacing::Joint,
@@ -156,29 +197,108 @@ impl iter::FromIterator<TokenTree> for TokenStream {
}
}
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+ #[cfg(slow_extend)]
+ Some(TokenStream::Nightly(first)) => {
+ let stream = iter::once(first).chain(streams.map(|s| {
+ match s {
+ TokenStream::Nightly(s) => s,
+ TokenStream::Stable(_) => mismatch(),
+ }
+ })).collect();
+ TokenStream::Nightly(stream)
+ }
+ #[cfg(not(slow_extend))]
+ Some(TokenStream::Nightly(mut first)) => {
+ first.extend(streams.map(|s| {
+ match s {
+ TokenStream::Nightly(s) => s,
+ TokenStream::Stable(_) => mismatch(),
+ }
+ }));
+ TokenStream::Nightly(first)
+ }
+ Some(TokenStream::Stable(mut first)) => {
+ first.extend(streams.map(|s| {
+ match s {
+ TokenStream::Stable(s) => s,
+ TokenStream::Nightly(_) => mismatch(),
+ }
+ }));
+ TokenStream::Stable(first)
+ }
+ None => TokenStream::new(),
+
+ }
+ }
+}
+
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
match self {
TokenStream::Nightly(tts) => {
- *tts = tts
- .clone()
- .into_iter()
- .chain(
+ #[cfg(not(slow_extend))]
+ {
+ tts.extend(
streams
.into_iter()
- .map(TokenStream::from)
- .flat_map(|t| match t {
- TokenStream::Nightly(tts) => tts.into_iter(),
- _ => panic!(),
- }),
- )
- .collect();
+ .map(|t| TokenStream::from(t).unwrap_nightly()),
+ );
+ }
+ #[cfg(slow_extend)]
+ {
+ *tts = tts
+ .clone()
+ .into_iter()
+ .chain(
+ streams
+ .into_iter()
+ .map(TokenStream::from)
+ .flat_map(|t| match t {
+ TokenStream::Nightly(tts) => tts.into_iter(),
+ _ => mismatch(),
+ }),
+ ).collect();
+ }
}
TokenStream::Stable(tts) => tts.extend(streams),
}
}
}
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ match self {
+ TokenStream::Nightly(tts) => {
+ #[cfg(not(slow_extend))]
+ {
+ tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
+ }
+ #[cfg(slow_extend)]
+ {
+ *tts = tts
+ .clone()
+ .into_iter()
+ .chain(
+ streams
+ .into_iter()
+ .flat_map(|t| match t {
+ TokenStream::Nightly(tts) => tts.into_iter(),
+ _ => mismatch(),
+ }),
+ ).collect();
+ }
+ }
+ TokenStream::Stable(tts) => {
+ tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
+ }
+ }
+ }
+}
+
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
@@ -235,18 +355,7 @@ impl Iterator for TokenTreeIter {
TokenTreeIter::Stable(iter) => return iter.next(),
};
Some(match token {
- proc_macro::TokenTree::Group(tt) => {
- let delim = match tt.delimiter() {
- proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
- proc_macro::Delimiter::Bracket => Delimiter::Bracket,
- proc_macro::Delimiter::Brace => Delimiter::Brace,
- proc_macro::Delimiter::None => Delimiter::None,
- };
- let stream = ::TokenStream::_new(TokenStream::Nightly(tt.stream()));
- let mut g = Group::new(delim, stream);
- g.set_span(::Span::_new(Span::Nightly(tt.span())));
- g.into()
- }
+ proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Nightly(tt)).into(),
proc_macro::TokenTree::Punct(tt) => {
let spacing = match tt.spacing() {
proc_macro::Spacing::Joint => Spacing::Joint,
@@ -275,48 +384,40 @@ impl fmt::Debug for TokenTreeIter {
}
}
-pub use stable::FileName;
-
-// NOTE: We have to generate our own filename object here because we can't wrap
-// the one provided by proc_macro.
#[derive(Clone, PartialEq, Eq)]
+#[cfg(super_unstable)]
pub enum SourceFile {
- Nightly(proc_macro::SourceFile, FileName),
+ Nightly(proc_macro::SourceFile),
Stable(stable::SourceFile),
}
+#[cfg(super_unstable)]
impl SourceFile {
fn nightly(sf: proc_macro::SourceFile) -> Self {
- let filename = stable::file_name(sf.path().display().to_string());
- SourceFile::Nightly(sf, filename)
+ SourceFile::Nightly(sf)
}
/// Get the path to this source file as a string.
- pub fn path(&self) -> &FileName {
+ pub fn path(&self) -> PathBuf {
match self {
- SourceFile::Nightly(_, f) => f,
+ SourceFile::Nightly(a) => a.path(),
SourceFile::Stable(a) => a.path(),
}
}
pub fn is_real(&self) -> bool {
match self {
- SourceFile::Nightly(a, _) => a.is_real(),
+ SourceFile::Nightly(a) => a.is_real(),
SourceFile::Stable(a) => a.is_real(),
}
}
}
-impl AsRef<FileName> for SourceFile {
- fn as_ref(&self) -> &FileName {
- self.path()
- }
-}
-
+#[cfg(super_unstable)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
- SourceFile::Nightly(a, _) => a.fmt(f),
+ SourceFile::Nightly(a) => a.fmt(f),
SourceFile::Stable(a) => a.fmt(f),
}
}
@@ -342,6 +443,7 @@ impl Span {
}
}
+ #[cfg(super_unstable)]
pub fn def_site() -> Span {
if nightly_works() {
Span::Nightly(proc_macro::Span::def_site())
@@ -350,6 +452,7 @@ impl Span {
}
}
+ #[cfg(super_unstable)]
pub fn resolved_at(&self, other: Span) -> Span {
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.resolved_at(b)),
@@ -358,6 +461,7 @@ impl Span {
}
}
+ #[cfg(super_unstable)]
pub fn located_at(&self, other: Span) -> Span {
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.located_at(b)),
@@ -373,7 +477,7 @@ impl Span {
}
}
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(super_unstable)]
pub fn source_file(&self) -> SourceFile {
match self {
Span::Nightly(s) => SourceFile::nightly(s.source_file()),
@@ -381,7 +485,7 @@ impl Span {
}
}
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(super_unstable)]
pub fn start(&self) -> LineColumn {
match self {
Span::Nightly(s) => {
@@ -395,7 +499,7 @@ impl Span {
}
}
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(super_unstable)]
pub fn end(&self) -> LineColumn {
match self {
Span::Nightly(s) => {
@@ -409,7 +513,7 @@ impl Span {
}
}
- #[cfg(procmacro2_semver_exempt)]
+ #[cfg(super_unstable)]
+ pub fn join(&self, other: Span) -> Option<Span> {
let ret = match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.join(b)?),
@@ -419,6 +523,7 @@ impl Span {
Some(ret)
}
+ #[cfg(super_unstable)]
pub fn eq(&self, other: &Span) -> bool {
match (self, other) {
(Span::Nightly(a), Span::Nightly(b)) => a.eq(b),
@@ -456,6 +561,112 @@ impl fmt::Debug for Span {
}
}
+#[derive(Clone)]
+pub enum Group {
+ Nightly(proc_macro::Group),
+ Stable(stable::Group),
+}
+
+impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ match stream {
+ TokenStream::Nightly(stream) => {
+ let delimiter = match delimiter {
+ Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
+ Delimiter::Bracket => proc_macro::Delimiter::Bracket,
+ Delimiter::Brace => proc_macro::Delimiter::Brace,
+ Delimiter::None => proc_macro::Delimiter::None,
+ };
+ Group::Nightly(proc_macro::Group::new(delimiter, stream))
+ }
+ TokenStream::Stable(stream) => {
+ Group::Stable(stable::Group::new(delimiter, stream))
+ }
+ }
+ }
+
+ pub fn delimiter(&self) -> Delimiter {
+ match self {
+ Group::Nightly(g) => match g.delimiter() {
+ proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
+ proc_macro::Delimiter::Bracket => Delimiter::Bracket,
+ proc_macro::Delimiter::Brace => Delimiter::Brace,
+ proc_macro::Delimiter::None => Delimiter::None,
+ }
+ Group::Stable(g) => g.delimiter(),
+ }
+ }
+
+ pub fn stream(&self) -> TokenStream {
+ match self {
+ Group::Nightly(g) => TokenStream::Nightly(g.stream()),
+ Group::Stable(g) => TokenStream::Stable(g.stream()),
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+ Group::Nightly(g) => Span::Nightly(g.span()),
+ Group::Stable(g) => Span::Stable(g.span()),
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn span_open(&self) -> Span {
+ match self {
+ Group::Nightly(g) => Span::Nightly(g.span_open()),
+ Group::Stable(g) => Span::Stable(g.span_open()),
+ }
+ }
+
+ #[cfg(super_unstable)]
+ pub fn span_close(&self) -> Span {
+ match self {
+ Group::Nightly(g) => Span::Nightly(g.span_close()),
+ Group::Stable(g) => Span::Stable(g.span_close()),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match (self, span) {
+ (Group::Nightly(g), Span::Nightly(s)) => g.set_span(s),
+ (Group::Stable(g), Span::Stable(s)) => g.set_span(s),
+ _ => mismatch(),
+ }
+ }
+
+ fn unwrap_nightly(self) -> proc_macro::Group {
+ match self {
+ Group::Nightly(g) => g,
+ Group::Stable(_) => mismatch(),
+ }
+ }
+}
+
+impl From<stable::Group> for Group {
+ fn from(g: stable::Group) -> Self {
+ Group::Stable(g)
+ }
+}
+
+impl fmt::Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Group::Nightly(group) => group.fmt(formatter),
+ Group::Stable(group) => group.fmt(formatter),
+ }
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Group::Nightly(group) => group.fmt(formatter),
+ Group::Stable(group) => group.fmt(formatter),
+ }
+ }
+}
+
#[derive(Clone)]
pub enum Ident {
Nightly(proc_macro::Ident),
@@ -472,7 +683,17 @@ impl Ident {
pub fn new_raw(string: &str, span: Span) -> Ident {
match span {
- Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new_raw(string, s)),
+ Span::Nightly(s) => {
+ let p: proc_macro::TokenStream = string.parse().unwrap();
+ let ident = match p.into_iter().next() {
+ Some(proc_macro::TokenTree::Ident(mut i)) => {
+ i.set_span(s);
+ i
+ }
+ _ => panic!(),
+ };
+ Ident::Nightly(ident)
+ }
Span::Stable(s) => Ident::Stable(stable::Ident::new_raw(string, s)),
}
}
@@ -500,6 +721,29 @@ impl Ident {
}
}
+impl PartialEq for Ident {
+ fn eq(&self, other: &Ident) -> bool {
+ match (self, other) {
+ (Ident::Nightly(t), Ident::Nightly(o)) => t.to_string() == o.to_string(),
+ (Ident::Stable(t), Ident::Stable(o)) => t == o,
+ _ => mismatch(),
+ }
+ }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+ T: ?Sized + AsRef<str>,
+{
+ fn eq(&self, other: &T) -> bool {
+ let other = other.as_ref();
+ match self {
+ Ident::Nightly(t) => t.to_string() == other,
+ Ident::Stable(t) => t == other,
+ }
+ }
+}
+
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
@@ -565,6 +809,12 @@ impl Literal {
f64_suffixed => f64,
}
+ #[cfg(u128)]
+ suffixed_numbers! {
+ i128_suffixed => i128,
+ u128_suffixed => u128,
+ }
+
unsuffixed_integers! {
u8_unsuffixed => u8,
u16_unsuffixed => u16,
@@ -578,6 +828,12 @@ impl Literal {
isize_unsuffixed => isize,
}
+ #[cfg(u128)]
+ unsuffixed_integers! {
+ i128_unsuffixed => i128,
+ u128_unsuffixed => u128,
+ }
+
pub fn f32_unsuffixed(f: f32) -> Literal {
if nightly_works() {
Literal::Nightly(proc_macro::Literal::f32_unsuffixed(f))
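
For illustration, a minimal usage sketch of the new 128-bit `Literal` constructors added above (assumes proc-macro2 0.4.24 on a compiler where the build script sets the `u128` cfg; not part of the patch):

```rust
extern crate proc_macro2;

use proc_macro2::Literal;

fn main() {
    // Suffixed constructors keep the type in the token: renders as `1u128`.
    let suffixed = Literal::u128_suffixed(1);
    // Unsuffixed constructors emit bare digits and leave the type to inference.
    let unsuffixed = Literal::u128_unsuffixed(u128::max_value());
    println!("{} {}", suffixed, unsuffixed);
}
```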
diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
new file mode 100644
index 0000000000000..7bb5027621c98
--- /dev/null
+++ b/third_party/rust/proc-macro2/tests/marker.rs
@@ -0,0 +1,61 @@
+extern crate proc_macro2;
+
+use proc_macro2::*;
+
+macro_rules! assert_impl {
+ ($ty:ident is $($marker:ident) and +) => {
+ #[test]
+ #[allow(non_snake_case)]
+ fn $ty() {
+ fn assert_implemented<T: $($marker +)+>() {}
+ assert_implemented::<$ty>();
+ }
+ };
+
+ ($ty:ident is not $($marker:ident) or +) => {
+ #[test]
+ #[allow(non_snake_case)]
+ fn $ty() {
+ $(
+ {
+ // Implemented for types that implement $marker.
+ trait IsNotImplemented {
+ fn assert_not_implemented() {}
+ }
+ impl<T: $marker> IsNotImplemented for T {}
+
+ // Implemented for the type being tested.
+ trait IsImplemented {
+ fn assert_not_implemented() {}
+ }
+ impl IsImplemented for $ty {}
+
+ // If $ty does not implement $marker, there is no ambiguity
+ // in the following trait method call.
+ <$ty>::assert_not_implemented();
+ }
+ )+
+ }
+ };
+}
+
+assert_impl!(Delimiter is Send and Sync);
+assert_impl!(Spacing is Send and Sync);
+
+assert_impl!(Group is not Send or Sync);
+assert_impl!(Ident is not Send or Sync);
+assert_impl!(LexError is not Send or Sync);
+assert_impl!(Literal is not Send or Sync);
+assert_impl!(Punct is not Send or Sync);
+assert_impl!(Span is not Send or Sync);
+assert_impl!(TokenStream is not Send or Sync);
+assert_impl!(TokenTree is not Send or Sync);
+
+#[cfg(procmacro2_semver_exempt)]
+mod semver_exempt {
+ use super::*;
+
+ assert_impl!(LineColumn is Send and Sync);
+
+ assert_impl!(SourceFile is not Send or Sync);
+}
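
The `is not Send or Sync` assertions above rely on a method-resolution ambiguity trick. A standalone sketch of the same idea, using `Rc<()>` (which is `!Send`) as a stand-in for the type under test:

```rust
use std::rc::Rc;

// Implemented for every type that is Send.
trait IsNotImplemented {
    fn assert_not_implemented() {}
}
impl<T: Send> IsNotImplemented for T {}

// Implemented only for the type under test.
trait IsImplemented {
    fn assert_not_implemented() {}
}
impl IsImplemented for Rc<()> {}

fn main() {
    // Rc<()> is !Send, so only IsImplemented supplies this method and the
    // call compiles. If Rc<()> were Send, both traits would be candidates
    // and rustc would reject the call as ambiguous, turning the test into a
    // compile-time "is not implemented" check.
    <Rc<()>>::assert_not_implemented();
}
```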
diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
index 5d2fb854454d1..9d37cf37d26de 100644
--- a/third_party/rust/proc-macro2/tests/test.rs
+++ b/third_party/rust/proc-macro2/tests/test.rs
@@ -203,7 +203,7 @@ fn default_span() {
assert_eq!(end.line, 1);
assert_eq!(end.column, 0);
let source_file = Span::call_site().source_file();
- assert_eq!(source_file.path().to_string(), "<unspecified>");
+ assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
assert!(!source_file.is_real());
}
diff --git a/third_party/rust/quote/.cargo-checksum.json b/third_party/rust/quote/.cargo-checksum.json
index 70415c6e55617..f1a67ee8111cf 100644
--- a/third_party/rust/quote/.cargo-checksum.json
+++ b/third_party/rust/quote/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"33e512b1a2fd40b4d0b5af4ac16ad4f163e0383ba2f4abcd7a7e575e2af3442c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"61dc7827fb2e29185f0d73594db326bfdbec8393ca7a48429b259711d42e80f9","src/ext.rs":"2e2f71fca8c8580eeed138da42d93dc21fc48d7a8da973ae6d3b616da6a3b0e3","src/lib.rs":"0dedf7935a0203324804cecdf6350245caa24dbdaaf9e168b7ab90b0883ec0c4","src/to_tokens.rs":"10dc32fbe69798408ee1f49ec25770b90eeb6b069552f50cd4e03228b8e85847","tests/test.rs":"90fe0e9a704e628339fe9298f0cb8307e94ebadfe28fffd7b2fc2d94203bc342"},"package":"e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"}
\ No newline at end of file
+{"files":{"Cargo.toml":"44cf9d3a28be1b21f4247572b6ca4d38dc3fd42fa84c4a4e0e5632aa27bee083","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"b43ef9b9c61628f8de7036271e61322cba23d878d056748e571f4f6cf9fba1b1","src/ext.rs":"a2def0b0f24c822b3f936a781c347e5f6fdc75120f85874c94f5e7eb708168c2","src/lib.rs":"f1ba768690c57252e8135ee474a20bdd513fd0bd0664e9e9b697800163f39d08","src/to_tokens.rs":"86c419a72017846ef33a0acc53caee7312c750c90b3f1d3b58e33f20efcb94f4","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"}
\ No newline at end of file
diff --git a/third_party/rust/quote/Cargo.toml b/third_party/rust/quote/Cargo.toml
index 5eb250726574b..c2c73ac9ea803 100644
--- a/third_party/rust/quote/Cargo.toml
+++ b/third_party/rust/quote/Cargo.toml
@@ -12,19 +12,22 @@
[package]
name = "quote"
-version = "0.6.3"
+version = "0.6.10"
authors = ["David Tolnay "]
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
description = "Quasi-quoting macro quote!(...)"
documentation = "https://docs.rs/quote/"
readme = "README.md"
keywords = ["syn"]
+categories = ["development-tools::procedural-macro-helpers"]
license = "MIT/Apache-2.0"
repository = "https://github.com/dtolnay/quote"
[dependencies.proc-macro2]
-version = "0.4.4"
+version = "0.4.21"
default-features = false
[features]
default = ["proc-macro"]
proc-macro = ["proc-macro2/proc-macro"]
+[badges.travis-ci]
+repository = "dtolnay/quote"
diff --git a/third_party/rust/quote/README.md b/third_party/rust/quote/README.md
index 0c799df261ebc..00232cd38b0ee 100644
--- a/third_party/rust/quote/README.md
+++ b/third_party/rust/quote/README.md
@@ -68,11 +68,11 @@ let tokens = quote! {
}
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
- fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
- #path(self.value, s)
+ #path(self.value, serializer)
}
}
diff --git a/third_party/rust/quote/src/ext.rs b/third_party/rust/quote/src/ext.rs
index a707f09918955..614e1e1b2d358 100644
--- a/third_party/rust/quote/src/ext.rs
+++ b/third_party/rust/quote/src/ext.rs
@@ -1,5 +1,7 @@
use super::ToTokens;
+use std::iter;
+
use proc_macro2::{TokenStream, TokenTree};
/// TokenStream extension trait with methods for appending tokens.
@@ -36,7 +38,7 @@ impl TokenStreamExt for TokenStream {
where
U: Into<TokenTree>,
{
- self.extend(Some(token.into()));
+ self.extend(iter::once(token.into()));
}
/// For use by `ToTokens` implementations.
diff --git a/third_party/rust/quote/src/lib.rs b/third_party/rust/quote/src/lib.rs
index 76e07f094f2da..8f0203b8e0812 100644
--- a/third_party/rust/quote/src/lib.rs
+++ b/third_party/rust/quote/src/lib.rs
@@ -67,11 +67,11 @@
//! }
//!
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
-//! fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
+//! fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
//! where
//! S: serde::Serializer,
//! {
-//! #path(self.value, s)
+//! #path(self.value, serializer)
//! }
//! }
//!
@@ -92,9 +92,12 @@
//! An even higher limit may be necessary for especially large invocations.
// Quote types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/quote/0.6.3")]
+#![doc(html_root_url = "https://docs.rs/quote/0.6.10")]
-#[cfg(feature = "proc-macro")]
+#[cfg(all(
+ not(all(target_arch = "wasm32", target_os = "unknown")),
+ feature = "proc-macro"
+))]
extern crate proc_macro;
extern crate proc_macro2;
@@ -107,17 +110,128 @@ pub use to_tokens::ToTokens;
// Not public API.
#[doc(hidden)]
pub mod __rt {
- // Not public API.
+ use ext::TokenStreamExt;
pub use proc_macro2::*;
- // Not public API.
+ fn is_ident_start(c: u8) -> bool {
+ (b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
+ }
+
+ fn is_ident_continue(c: u8) -> bool {
+ (b'a' <= c && c <= b'z')
+ || (b'A' <= c && c <= b'Z')
+ || c == b'_'
+ || (b'0' <= c && c <= b'9')
+ }
+
+ fn is_ident(token: &str) -> bool {
+ if token.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
+ return false;
+ }
+
+ let mut bytes = token.bytes();
+ let first = bytes.next().unwrap();
+ if !is_ident_start(first) {
+ return false;
+ }
+ for ch in bytes {
+ if !is_ident_continue(ch) {
+ return false;
+ }
+ }
+ true
+ }
+
pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
- let s: TokenStream = s.parse().expect("invalid token stream");
- tokens.extend(s.into_iter().map(|mut t| {
- t.set_span(span);
- t
- }));
+ if is_ident(s) {
+ // Fast path, since idents are the most common token.
+ tokens.append(Ident::new(s, span));
+ } else {
+ let s: TokenStream = s.parse().expect("invalid token stream");
+ tokens.extend(s.into_iter().map(|mut t| {
+ t.set_span(span);
+ t
+ }));
+ }
+ }
+
+ macro_rules! push_punct {
+ ($name:ident $char1:tt) => {
+ pub fn $name(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
+ ($name:ident $char1:tt $char2:tt) => {
+ pub fn $name(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char2, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
+ ($name:ident $char1:tt $char2:tt $char3:tt) => {
+ pub fn $name(tokens: &mut TokenStream, span: Span) {
+ let mut punct = Punct::new($char1, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char2, Spacing::Joint);
+ punct.set_span(span);
+ tokens.append(punct);
+ let mut punct = Punct::new($char3, Spacing::Alone);
+ punct.set_span(span);
+ tokens.append(punct);
+ }
+ };
}
+
+ push_punct!(push_add '+');
+ push_punct!(push_add_eq '+' '=');
+ push_punct!(push_and '&');
+ push_punct!(push_and_and '&' '&');
+ push_punct!(push_and_eq '&' '=');
+ push_punct!(push_at '@');
+ push_punct!(push_bang '!');
+ push_punct!(push_caret '^');
+ push_punct!(push_caret_eq '^' '=');
+ push_punct!(push_colon ':');
+ push_punct!(push_colon2 ':' ':');
+ push_punct!(push_comma ',');
+ push_punct!(push_div '/');
+ push_punct!(push_div_eq '/' '=');
+ push_punct!(push_dot '.');
+ push_punct!(push_dot2 '.' '.');
+ push_punct!(push_dot3 '.' '.' '.');
+ push_punct!(push_dot_dot_eq '.' '.' '=');
+ push_punct!(push_eq '=');
+ push_punct!(push_eq_eq '=' '=');
+ push_punct!(push_ge '>' '=');
+ push_punct!(push_gt '>');
+ push_punct!(push_le '<' '=');
+ push_punct!(push_lt '<');
+ push_punct!(push_mul_eq '*' '=');
+ push_punct!(push_ne '!' '=');
+ push_punct!(push_or '|');
+ push_punct!(push_or_eq '|' '=');
+ push_punct!(push_or_or '|' '|');
+ push_punct!(push_pound '#');
+ push_punct!(push_question '?');
+ push_punct!(push_rarrow '-' '>');
+ push_punct!(push_larrow '<' '-');
+ push_punct!(push_rem '%');
+ push_punct!(push_rem_eq '%' '=');
+ push_punct!(push_fat_arrow '=' '>');
+ push_punct!(push_semi ';');
+ push_punct!(push_shl '<' '<');
+ push_punct!(push_shl_eq '<' '<' '=');
+ push_punct!(push_shr '>' '>');
+ push_punct!(push_shr_eq '>' '>' '=');
+ push_punct!(push_star '*');
+ push_punct!(push_sub '-');
+ push_punct!(push_sub_eq '-' '=');
}
/// The whole point.
@@ -132,10 +246,13 @@ pub mod __rt {
///
/// Variable interpolation is done with `#var` (similar to `$var` in
/// `macro_rules!` macros). This grabs the `var` variable that is currently in
-/// scope and inserts it in that location in the output tokens. The variable
-/// must implement the [`ToTokens`] trait.
+/// scope and inserts it in that location in the output tokens. Any type
+/// implementing the [`ToTokens`] trait can be interpolated. This includes most
+/// Rust primitive types as well as most of the syntax tree types from the [Syn]
+/// crate.
///
/// [`ToTokens`]: trait.ToTokens.html
+/// [Syn]: https://github.com/dtolnay/syn
///
/// Repetition is done using `#(...)*` or `#(...),*` again similar to
/// `macro_rules!`. This iterates through the elements of any variable
@@ -200,7 +317,7 @@ pub mod __rt {
/// #
/// # fn main() {}
/// ```
-#[macro_export]
+#[macro_export(local_inner_macros)]
macro_rules! quote {
($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*));
}
@@ -309,7 +426,7 @@ macro_rules! quote {
/// site. If we resolve `Sync` at the same span that the user's type is going to
/// be resolved, then they could bypass our check by defining their own trait
/// named `Sync` that is implemented for their type.
-#[macro_export]
+#[macro_export(local_inner_macros)]
macro_rules! quote_spanned {
($span:expr=> $($tt:tt)*) => {
{
@@ -325,7 +442,7 @@ macro_rules! quote_spanned {
//
// in: pounded_var_names!(then () a #b c #( #d )* #e)
// out: then!(() b d e)
-#[macro_export]
+#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! pounded_var_names {
($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
@@ -370,7 +487,7 @@ macro_rules! pounded_var_names {
//
// in: nested_tuples_pat!(() a)
// out: a
-#[macro_export]
+#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! nested_tuples_pat {
(()) => {
@@ -395,7 +512,7 @@ macro_rules! nested_tuples_pat {
//
// in: multi_zip_iter!(() a)
// out: a
-#[macro_export]
+#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! multi_zip_expr {
(()) => {
@@ -419,7 +536,7 @@ macro_rules! multi_zip_expr {
};
}
-#[macro_export]
+#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! quote_each_token {
($tokens:ident $span:ident) => {};
@@ -454,7 +571,7 @@ macro_rules! quote_each_token {
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
- quote_spanned!($span=> $($inner)*).into(),
+ quote_spanned!($span=> $($inner)*),
);
g.set_span($span);
Some($crate::__rt::TokenTree::from(g))
@@ -471,7 +588,7 @@ macro_rules! quote_each_token {
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Parenthesis,
- quote_spanned!($span=> $($first)*).into(),
+ quote_spanned!($span=> $($first)*),
);
g.set_span($span);
Some($crate::__rt::TokenTree::from(g))
@@ -483,7 +600,7 @@ macro_rules! quote_each_token {
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
- quote_spanned!($span=> $($first)*).into(),
+ quote_spanned!($span=> $($first)*),
);
g.set_span($span);
Some($crate::__rt::TokenTree::from(g))
@@ -495,7 +612,7 @@ macro_rules! quote_each_token {
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Brace,
- quote_spanned!($span=> $($first)*).into(),
+ quote_spanned!($span=> $($first)*),
);
g.set_span($span);
Some($crate::__rt::TokenTree::from(g))
@@ -503,9 +620,239 @@ macro_rules! quote_each_token {
quote_each_token!($tokens $span $($rest)*);
};
+ ($tokens:ident $span:ident + $($rest:tt)*) => {
+ $crate::__rt::push_add(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident += $($rest:tt)*) => {
+ $crate::__rt::push_add_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident & $($rest:tt)*) => {
+ $crate::__rt::push_and(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident && $($rest:tt)*) => {
+ $crate::__rt::push_and_and(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident &= $($rest:tt)*) => {
+ $crate::__rt::push_and_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident @ $($rest:tt)*) => {
+ $crate::__rt::push_at(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ! $($rest:tt)*) => {
+ $crate::__rt::push_bang(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ^ $($rest:tt)*) => {
+ $crate::__rt::push_caret(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ^= $($rest:tt)*) => {
+ $crate::__rt::push_caret_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident : $($rest:tt)*) => {
+ $crate::__rt::push_colon(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident :: $($rest:tt)*) => {
+ $crate::__rt::push_colon2(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident , $($rest:tt)*) => {
+ $crate::__rt::push_comma(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident / $($rest:tt)*) => {
+ $crate::__rt::push_div(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident /= $($rest:tt)*) => {
+ $crate::__rt::push_div_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident . $($rest:tt)*) => {
+ $crate::__rt::push_dot(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident .. $($rest:tt)*) => {
+ $crate::__rt::push_dot2(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ... $($rest:tt)*) => {
+ $crate::__rt::push_dot3(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ..= $($rest:tt)*) => {
+ $crate::__rt::push_dot_dot_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident = $($rest:tt)*) => {
+ $crate::__rt::push_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident == $($rest:tt)*) => {
+ $crate::__rt::push_eq_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident >= $($rest:tt)*) => {
+ $crate::__rt::push_ge(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident > $($rest:tt)*) => {
+ $crate::__rt::push_gt(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident <= $($rest:tt)*) => {
+ $crate::__rt::push_le(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident < $($rest:tt)*) => {
+ $crate::__rt::push_lt(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident *= $($rest:tt)*) => {
+ $crate::__rt::push_mul_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident != $($rest:tt)*) => {
+ $crate::__rt::push_ne(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident | $($rest:tt)*) => {
+ $crate::__rt::push_or(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident |= $($rest:tt)*) => {
+ $crate::__rt::push_or_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident || $($rest:tt)*) => {
+ $crate::__rt::push_or_or(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident # $($rest:tt)*) => {
+ $crate::__rt::push_pound(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ? $($rest:tt)*) => {
+ $crate::__rt::push_question(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident -> $($rest:tt)*) => {
+ $crate::__rt::push_rarrow(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident <- $($rest:tt)*) => {
+ $crate::__rt::push_larrow(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident % $($rest:tt)*) => {
+ $crate::__rt::push_rem(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident %= $($rest:tt)*) => {
+ $crate::__rt::push_rem_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident => $($rest:tt)*) => {
+ $crate::__rt::push_fat_arrow(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident ; $($rest:tt)*) => {
+ $crate::__rt::push_semi(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident << $($rest:tt)*) => {
+ $crate::__rt::push_shl(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident <<= $($rest:tt)*) => {
+ $crate::__rt::push_shl_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident >> $($rest:tt)*) => {
+ $crate::__rt::push_shr(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident >>= $($rest:tt)*) => {
+ $crate::__rt::push_shr_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident * $($rest:tt)*) => {
+ $crate::__rt::push_star(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident - $($rest:tt)*) => {
+ $crate::__rt::push_sub(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
+ ($tokens:ident $span:ident -= $($rest:tt)*) => {
+ $crate::__rt::push_sub_eq(&mut $tokens, $span);
+ quote_each_token!($tokens $span $($rest)*);
+ };
+
($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
- // TODO: this seems slow... special case some `:tt` arguments?
- $crate::__rt::parse(&mut $tokens, $span, stringify!($first));
+ $crate::__rt::parse(&mut $tokens, $span, quote_stringify!($first));
quote_each_token!($tokens $span $($rest)*);
};
}
+
+// Unhygienically invoke whatever `stringify` the caller has in scope i.e. not a
+// local macro. The macros marked `local_inner_macros` above cannot invoke
+// `stringify` directly.
+#[macro_export]
+#[doc(hidden)]
+macro_rules! quote_stringify {
+ ($tt:tt) => {
+ stringify!($tt)
+ };
+}
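
To see what the ident fast path in `__rt::parse` and the dedicated `push_*` arms are optimizing, here is a small hypothetical `quote!` invocation (sketch assumes quote 0.6.10 and proc-macro2 0.4 as dependencies; not part of the patch). Keywords and identifiers take the `Ident::new` fast path, while `->`, `+=` and `;` are appended as `Punct` tokens instead of round-tripping through the string parser:

```rust
extern crate proc_macro2;
#[macro_use]
extern crate quote;

use proc_macro2::{Ident, Span};

fn main() {
    let name = Ident::new("Counter", Span::call_site());

    let tokens = quote! {
        impl #name {
            fn bump(&mut self) -> u64 {
                self.value += 1;
                self.value
            }
        }
    };

    // The exact spacing of `to_string()` is an implementation detail, so only
    // check that the interesting pieces survived the round trip.
    let rendered = tokens.to_string();
    assert!(rendered.contains("impl Counter"));
    assert!(rendered.contains("+="));
}
```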
diff --git a/third_party/rust/quote/src/to_tokens.rs b/third_party/rust/quote/src/to_tokens.rs
index 4418cc4a6f8c5..4a9c8f11ec713 100644
--- a/third_party/rust/quote/src/to_tokens.rs
+++ b/third_party/rust/quote/src/to_tokens.rs
@@ -1,6 +1,7 @@
use super::TokenStreamExt;
use std::borrow::Cow;
+use std::iter;
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
@@ -77,6 +78,12 @@ impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
}
}
+impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ (**self).to_tokens(tokens);
+ }
+}
+
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
fn to_tokens(&self, tokens: &mut TokenStream) {
(**self).to_tokens(tokens);
@@ -136,6 +143,12 @@ primitive! {
f64 => f64_suffixed
}
+#[cfg(integer128)]
+primitive! {
+ i128 => i128_suffixed
+ u128 => u128_suffixed
+}
+
impl ToTokens for char {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(Literal::character(*self));
@@ -181,7 +194,7 @@ impl ToTokens for TokenTree {
impl ToTokens for TokenStream {
fn to_tokens(&self, dst: &mut TokenStream) {
- dst.append_all(self.clone().into_iter());
+ dst.extend(iter::once(self.clone()));
}
fn into_token_stream(self) -> TokenStream {
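
The new blanket impl forwards `ToTokens` through `&mut T`. A small sketch with a hypothetical `Field` type showing why that matters for generic `T: ToTokens` APIs (assumes quote 0.6.10 and proc-macro2 0.4; not part of the patch):

```rust
extern crate proc_macro2;
extern crate quote;

use proc_macro2::{Ident, Span, TokenStream};
use quote::ToTokens;

struct Field {
    name: &'static str,
}

impl ToTokens for Field {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Ident::new(self.name, Span::call_site()).to_tokens(tokens);
    }
}

fn render<T: ToTokens>(value: T) -> String {
    value.into_token_stream().to_string()
}

fn main() {
    let mut field = Field { name: "width" };

    // `&mut Field` now implements ToTokens (forwarding to Field), so a mutable
    // borrow can be passed straight to generic `T: ToTokens` APIs without
    // first reborrowing it as `&Field`.
    assert_eq!(render(&mut field), "width");
    assert_eq!(render(&field), "width");
}
```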
diff --git a/third_party/rust/quote/tests/conditional/integer128.rs b/third_party/rust/quote/tests/conditional/integer128.rs
new file mode 100644
index 0000000000000..61e22744683b1
--- /dev/null
+++ b/third_party/rust/quote/tests/conditional/integer128.rs
@@ -0,0 +1,11 @@
+#[test]
+fn test_integer128() {
+ let ii128 = -1i128;
+ let uu128 = 1u128;
+
+ let tokens = quote! {
+ #ii128 #uu128
+ };
+ let expected = "-1i128 1u128";
+ assert_eq!(expected, tokens.to_string());
+}
diff --git a/third_party/rust/quote/tests/test.rs b/third_party/rust/quote/tests/test.rs
index 6f0ac8964ebea..f832da596b5d5 100644
--- a/third_party/rust/quote/tests/test.rs
+++ b/third_party/rust/quote/tests/test.rs
@@ -9,6 +9,11 @@ extern crate quote;
use proc_macro2::{Ident, Span, TokenStream};
use quote::TokenStreamExt;
+mod conditional {
+ #[cfg(integer128)]
+ mod integer128;
+}
+
struct X;
impl quote::ToTokens for X {
diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
index 3a59ad3bf6753..509ada56d2bd4 100644
--- a/third_party/rust/syn/.cargo-checksum.json
+++ b/third_party/rust/syn/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"cc823b5150d40948fb45042c1987dd8ede59ed6aa64003f622c4b901e319a218","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"9ca76bd182b81395755fbf0a4f1af086a46a90848f45b75210517004e751afe2","src/attr.rs":"ac161a3011bcd3381eb143e0954851fcff3e82e47075e6c9f6191ccf8ee67fee","src/buffer.rs":"a82b47bb12ec0de4159a7677712db5f4f54c145eb61aa88a1696d32cf143d50e","src/data.rs":"c34df5d623fed7a52be01fc625a4502f6ad97dc8feb25c51f7d57029dbdd91dd","src/derive.rs":"7a2246e8ee03fcbea040363d0b943daac696667ba799218231307c1b7a96aeed","src/error.rs":"93a265306ee6d265feeccb65b64e429ec6b4bb29d825cb52a319ea86e5cc1c11","src/export.rs":"39cc2468a141fb8229c9189dfe99875c278850714b1996e683a5b4cbc8aa3457","src/expr.rs":"91bab694502cebc56bdcd45219f1cf317ff857320d855e595ec2acc0f9ab781a","src/ext.rs":"4902ffc7dc25a1bb5813d5292a3df7cbf72ebad79def578c7cd231cf67e1785c","src/file.rs":"ebd350b5ff548cdbb21a0fadd5572575a216a1b0caef36dd46477ca324c7af6f","src/gen/fold.rs":"bfca5243b4694cc6a9b0d1f34ca53fa90387325fd4ee6bce024adb3ca42f4472","src/gen/visit.rs":"4d13b239db7c38a38f8ce5dfe827317ec4d35df83dd65ad5a350a3c882632bfd","src/gen/visit_mut.rs":"f5c8aea5907084c2425cdb613a07def41735c764022f7549876c9fa4c7170d5a","src/gen_helper.rs":"d128fbd24fadfc5634976bdb9188c649f9905718c9c987a2839c3e6134b155a2","src/generics.rs":"3b07bcfa970714c38c2b524da765e5b0465832d91aba9ed40b99b4aa7a7ea9c4","src/group.rs":"7faa0b3701b6e597a5766e316733cd4d88ecc2a24b54b233f33e28c23a7cbad8","src/ident.rs":"61534c48949ebfa03e948874ef64174e1c8111c3b953edd58f79748fe9e00507","src/item.rs":"897cfd8ea6f2ff1a664e2a5db84073f5ed1480318d14236c636c94608016b27c","src/keyword.rs":"0a8fd45d065c56532065f97fb097136b6f1a8efc393a0946e6a95217877616a9","src/lib.rs":"79664eb2d3570c2851c0d6e5dde4e9764619e14c5f107ff07d1416d2a15f8c1a","src/lifetime.rs":"3174a81cea3eef0ec1555900b96b1641d6d3ed3609bc17d72b02a1495377ac35","src/lit.rs":"661bf3ad4b49bc74dc808c1f1d584551689145d3c5fbadfcc28d157d5087981b","src/lookahead.rs":"07ce6d6915f24a01f86a486135eb841a3a01424148fe49ea797c5ffacf0c7673","src/mac.rs":"8a7efbdc9498612844c88660312117c401685bf300da7e91bef7effde3026019","src/macros.rs":"03d33128d9c2d2664cc2d3d158518c60cadad8b340945c6eb292fb7bd92e1b41","src/op.rs":"83bbe2199946abbf1d4a0bc6eb0d97285439f77e8e02646b8e3b707485e4301e","src/parse.rs":"248cfe3d826cf192efd5fef1b52db5075d3739e045f42157908596fc039a741b","src/parse_quote.rs":"e6f8101568d8430d6793183dfedfee01c2c479274ff7318dd221060ac140a477","src/path.rs":"e666c702d46e2849cdc37fddc2e1950659cd17611ebf988102f2bf0af72b6bd1","src/print.rs":"7ebb68123898f2ebbae12abf028747c05bea7b08f1e96b17164f1dcebdab7355","src/punctuated.rs":"01539dcb51c75e0fe0a4cdfb7716a909ce1bfd0767370c04043159a0a0dec154","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"0d9bdef967d339deae5e2229f9593f48b15af67cf1f79358aa464cacd173f32c","src/synom.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/token.rs":"40c406da738c52e52944585acc5ff36b75edb905b78cfb2bd74626663edb2c99","src/tt.rs":"6ff2559d5c5fcaa73e914cd0a4a5984ab21de7ea334f1c1498e73059d2d1f7d1","src/ty.rs":"503e0ae7da33ecd6d733aa3d14a45ced20696b9bdd52d3f9ef23fd31ec5651da"},"package":"455a6ec9b368f8c479b0ae5494d13b22dc00990d2f00d68c9dc6a2dc4f17f210"}
\ No newline at end of file
+{"files":{"Cargo.toml":"fdac1ee690fa3d33b906be9d09a8551741db11ddba7f755cb75c8d74fd2d918b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3f3d105c0f1bae3bdb5ed3cc32a8e5a02f3de6f62a9f17f5ba03af3f813d0881","src/attr.rs":"9210a8dc5fea0ee3004b14a9e2923142932c53986b56685d62d9aa115fe093b0","src/buffer.rs":"fac77febe022ab4982078c73ca502f9d698a402b3eca0f8e9c7a2796af01e5f4","src/data.rs":"54ee54c3c650bc5e200d4bea947d6e89093a39f2444cd43e8861f5852975e0bc","src/derive.rs":"eb041b47a73bace73c2872cd9a8e190de7e2b3b57cd57213770f088ec19ab3c6","src/error.rs":"0bcf09b0892c6d5f5f31f7f758866ded1e47463833cad60694329348ac1fb44a","src/export.rs":"1f7e017dac9426e91c796445e733c390c408c808ba9c21cb7df7799b67352fde","src/expr.rs":"3fe98f69af9be770d4d549d869d35136f3dea66452e4cb4b9e387982e3b7aea2","src/ext.rs":"1881179e634681cdd472ecac502192b5a5a7759056d1c49497706749fdab1bdf","src/file.rs":"abb9f5e71a8a6b52649c15da21d3f99e25a727d87c2f87d2744ac3970c1d683f","src/gen/fold.rs":"7f7ab907e3c17b503a72707e2f8d9fc29d9139269d810ea9b6511af9de7d3080","src/gen/visit.rs":"0a4543ac9f82d8ab7ccf02c55e8325ff9b859e36ea08d3e187a4836f470eef1c","src/gen/visit_mut.rs":"66441522f544056cd464740e8ba614688a5f417857c649550eeba2b589ef6096","src/gen_helper.rs":"644b1d31987919c0239891d8f90c09f3bf541a71fb85e744438e4814cc6dff89","src/generics.rs":"6ee5bba532b95f7de7c6bbe8caadabc6a71c45e7f8d7636d823075ff27f28937","src/group.rs":"03487f75d0abd302f06e50eb09d14ab83fb60c67e1f2602be53ca3f28a833b90","src/ident.rs":"61534c48949ebfa03e948874ef64174e1c8111c3b953edd58f79748fe9e00507","src/item.rs":"6799adb332bedaa68c3801055e71a5665cc7b9c4ba53960f6b91487408e7e10c","src/keyword.rs":"7dde0b4a0d70527921be16f50f8d88e4b5ad7e5fd70e9badd2bb351fd796dfb8","src/lib.rs":"254744712dae7c30ce7f97ce4685fc6256cf125e7872397d3dd9e32632273262","src/lifetime.rs":"7912a4c77ee805e912fb28c7f434836ea82540263d549877cd5edfbe32d1bf95","src/lit.rs":"b6aef4f2787201edbeb85529fc0c333bd8083d697a08f28c812b6b2f765939f5","src/lookahead.rs":"5b3c55ae8c1b1d0ed813c296dc6fa586379a99e7792a3cb0d634ae6ca74f54b5","src/mac.rs":"a91623ed9c1de7b18ef752db79a242002e95156497a52a1790a75069915d22ee","src/macros.rs":"2f91e07a1aec4b385986c0a0f66274e8de1c1aa81f95d398a5cd364b3c451bb4","src/op.rs":"01edb1e07b6d60b266797ca4b30788b0a511452228e04073a11f0b61f106a0e7","src/parse.rs":"d907b9822943bafbcb1e005f09a145e46c162e7702fce703b57f9b7ccbdf85a2","src/parse_macro_input.rs":"8df7b4c1b361171f3fefb0490dec570ad29c024c04e35184b296725f97f2002c","src/parse_quote.rs":"d5e613fbba06900d882f2aaa042f10c1bee1b1dffaa1d9ee9a73d1e504a08fad","src/path.rs":"d6a319db75e4b34783912aed0ddfad92cdec05798d8d378f2f23231437cab3e1","src/print.rs":"7ebb68123898f2ebbae12abf028747c05bea7b08f1e96b17164f1dcebdab7355","src/punctuated.rs":"5ad6885e602cb1c79b49f11e1c739bdb7c33ecfa1ca9c709d711b0778ae48085","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"83b4ab1e2138ac9340eaa8234ad1d9f7468b450ddf3a852e574cac18e4f766b8","src/synom.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/thread.rs":"798dd0a6cca7807f1d55c1f79cec967314a75c4e8e2cfdb5644499c22618b307","src/token.rs":"8fa7ffb89da61d5187ab0ff40de7b3b1135ace6cf770c8d84fce8371854698a9","src/tt.rs":"b3d99cbd68cd50749f26f4afa138e6366d327099ed566b30c315ccb58fa26ded","src/ty.rs":"4ac9d1b84f9bf269516348e1b923b1c8e3f7562b98ec7ef66174c31fffb8dce5"},"package":"734ecc29cd36e8123850d9bf21dfd62ef8300aaa8f879aabaa8997
21808be37c"}
\ No newline at end of file
diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
index 6ad286183f1d7..e3248e172143a 100644
--- a/third_party/rust/syn/Cargo.toml
+++ b/third_party/rust/syn/Cargo.toml
@@ -12,7 +12,7 @@
[package]
name = "syn"
-version = "0.15.7"
+version = "0.15.24"
authors = ["David Tolnay "]
include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"]
description = "Parser for Rust source code"
diff --git a/third_party/rust/syn/LICENSE-MIT b/third_party/rust/syn/LICENSE-MIT
index 5767dea4b109b..31aa79387f27e 100644
--- a/third_party/rust/syn/LICENSE-MIT
+++ b/third_party/rust/syn/LICENSE-MIT
@@ -1,5 +1,3 @@
-Copyright (c) 2018 Syn Developers
-
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
index e79b382572b49..ecc6eb878294d 100644
--- a/third_party/rust/syn/README.md
+++ b/third_party/rust/syn/README.md
@@ -81,12 +81,9 @@ proc-macro = true
```rust
extern crate proc_macro;
-extern crate syn;
-
-#[macro_use]
-extern crate quote;
use proc_macro::TokenStream;
+use quote::quote;
use syn::{parse_macro_input, DeriveInput};
#[proc_macro_derive(MyMacro)]
@@ -105,7 +102,7 @@ pub fn my_macro(input: TokenStream) -> TokenStream {
```
The [`heapsize`] example directory shows a complete working Macros 1.1
-implementation of a custom derive. It works on any Rust compiler \>=1.15.0. The
+implementation of a custom derive. It works on any Rust compiler 1.15+. The
example derives a `HeapSize` trait which computes an estimate of the amount of
heap memory owned by a value.
@@ -133,13 +130,6 @@ struct Demo<'a, T: ?Sized> {
## Spans and error reporting
-The [`heapsize2`] example directory is an extension of the `heapsize` example
-that demonstrates some of the hygiene and error reporting properties of Macros
-2.0. This example currently requires a nightly Rust compiler \>=1.24.0-nightly
-but we are working to stabilize all of the APIs involved.
-
-[`heapsize2`]: examples/heapsize2
-
The token-based procedural macro API provides great control over where the
compiler's error messages are displayed in user code. Consider the error the
user sees if one of their field types does not implement `HeapSize`.
@@ -152,20 +142,8 @@ struct Broken {
}
```
-In the Macros 1.1 string-based procedural macro world, the resulting error would
-point unhelpfully to the invocation of the derive macro and not to the actual
-problematic field.
-
-```
-error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
- --> src/main.rs:4:10
- |
-4 | #[derive(HeapSize)]
- | ^^^^^^^^
-```
-
By tracking span information all the way through the expansion of a procedural
-macro as shown in the `heapsize2` example, token-based macros in Syn are able to
+macro as shown in the `heapsize` example, token-based macros in Syn are able to
trigger errors that directly pinpoint the source of the problem.
```
@@ -246,24 +224,21 @@ available.
- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
library libproc_macro from rustc toolchain.
-## Nightly features
+## Proc macro shim
-By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's
-procedural macro API in a stable way that works all the way back to Rust 1.15.0.
-This shim makes it possible to write code without regard for whether the current
-compiler version supports the features we use.
+Syn uses the [proc-macro2] crate to emulate the compiler's procedural macro API
+in a stable way that works all the way back to Rust 1.15.0. This shim makes it
+possible to write code without regard for whether the current compiler version
+supports the features we use.
-[`proc-macro2`]: https://github.com/alexcrichton/proc-macro2
+In general all of your code should be written against proc-macro2 rather than
+proc-macro. The one exception is in the signatures of procedural macro entry
+points, which are required by the language to use `proc_macro::TokenStream`.
-On a nightly compiler, to eliminate the stable shim and use the compiler's
-`proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its
-`"nightly"` feature which bypasses the stable shim.
+The proc-macro2 crate will automatically detect and use the compiler's data
+structures on sufficiently new compilers.
-```toml
-[dependencies]
-syn = "0.15"
-proc-macro2 = { version = "0.4", features = ["nightly"] }
-```
+[proc-macro2]: https://github.com/alexcrichton/proc-macro2
## License
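
The shim boundary described in the README section above can be made explicit with the `From`/`Into` conversions between the two token stream types. A minimal sketch of a hypothetical proc-macro crate (assumes proc-macro2 0.4 with the default `proc-macro` feature; not part of the patch):

```rust
extern crate proc_macro;
extern crate proc_macro2;

use proc_macro::TokenStream;

#[proc_macro]
pub fn passthrough(input: TokenStream) -> TokenStream {
    // Convert to proc-macro2 at the edge required by the language...
    let input2: proc_macro2::TokenStream = input.into();
    // ...keep the reusable logic in terms of proc_macro2::TokenStream so it
    // can be unit tested outside of a macro invocation...
    let output2 = transform(input2);
    // ...and convert back on the way out.
    output2.into()
}

fn transform(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
    input
}
```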
diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
index 2e0cccbb8efe2..d1e7dca7c0e40 100644
--- a/third_party/rust/syn/src/attr.rs
+++ b/third_party/rust/syn/src/attr.rs
@@ -1,17 +1,11 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use punctuated::Punctuated;
use std::iter;
-use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree};
+use proc_macro2::TokenStream;
+#[cfg(not(feature = "parsing"))]
+use proc_macro2::{Delimiter, Spacing, TokenTree};
#[cfg(feature = "parsing")]
use parse::{ParseStream, Result};
@@ -55,11 +49,11 @@ ast_struct! {
/// path tts path tts
/// ```
///
- /// Use the [`interpret_meta`] method to try parsing the tokens of an
- /// attribute into the structured representation that is used by convention
- /// across most Rust libraries.
+ /// Use the [`parse_meta`] method to try parsing the tokens of an attribute
+ /// into the structured representation that is used by convention across
+ /// most Rust libraries.
///
- /// [`interpret_meta`]: #method.interpret_meta
+ /// [`parse_meta`]: #method.parse_meta
///
/// # Parsing
///
@@ -75,12 +69,9 @@ ast_struct! {
/// [`Attribute::parse_outer`]: #method.parse_outer
/// [`Attribute::parse_inner`]: #method.parse_inner
///
- /// ```
- /// #[macro_use]
- /// extern crate syn;
- ///
- /// use syn::{Attribute, Ident};
- /// use syn::parse::{Parse, ParseStream, Result};
+ /// ```edition2018
+ /// use syn::{Attribute, Ident, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
///
/// // Parses a unit struct with attributes.
/// //
@@ -103,8 +94,6 @@ ast_struct! {
/// })
/// }
/// }
- /// #
- /// # fn main() {}
/// ```
pub struct Attribute #manual_extra_traits {
pub pound_token: Token![#],
@@ -146,32 +135,65 @@ impl Hash for Attribute {
impl Attribute {
/// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
/// possible.
+ ///
+ /// Deprecated; use `parse_meta` instead.
+ #[doc(hidden)]
pub fn interpret_meta(&self) -> Option<Meta> {
- let name = if self.path.segments.len() == 1 {
- &self.path.segments.first().unwrap().value().ident
- } else {
- return None;
- };
-
- if self.tts.is_empty() {
- return Some(Meta::Word(name.clone()));
+ #[cfg(feature = "parsing")]
+ {
+ self.parse_meta().ok()
}
- let tts = self.tts.clone().into_iter().collect::<Vec<TokenTree>>();
+ #[cfg(not(feature = "parsing"))]
+ {
+ let name = if self.path.segments.len() == 1 {
+ &self.path.segments.first().unwrap().value().ident
+ } else {
+ return None;
+ };
- if tts.len() == 1 {
- if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
- return Some(meta);
+ if self.tts.is_empty() {
+ return Some(Meta::Word(name.clone()));
+ }
+
+ let tts = self.tts.clone().into_iter().collect::<Vec<TokenTree>>();
+
+ if tts.len() == 1 {
+ if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
+ return Some(meta);
+ }
}
- }
- if tts.len() == 2 {
- if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
- return Some(meta);
+ if tts.len() == 2 {
+ if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
+ return Some(meta);
+ }
}
+
+ None
+ }
+ }
+
+ /// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
+ /// possible.
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+ if let Some(ref colon) = self.path.leading_colon {
+ return Err(Error::new(colon.spans[0], "expected meta identifier"));
}
- None
+ let first_segment = self
+ .path
+ .segments
+ .first()
+ .expect("paths have at least one segment");
+ if let Some(colon) = first_segment.punct() {
+ return Err(Error::new(colon.spans[0], "expected meta value"));
+ }
+ let ident = first_segment.value().ident.clone();
+
+ let parser = |input: ParseStream| parsing::parse_meta_after_ident(ident, input);
+ parse::Parser::parse2(parser, self.tts.clone())
}
/// Parses zero or more outer attributes from the stream.
@@ -200,6 +222,7 @@ impl Attribute {
Ok(attrs)
}
+ #[cfg(not(feature = "parsing"))]
fn extract_meta_list(ident: Ident, tt: &TokenTree) -> Option<Meta> {
let g = match *tt {
TokenTree::Group(ref g) => g,
@@ -220,6 +243,7 @@ impl Attribute {
}))
}
+ #[cfg(not(feature = "parsing"))]
fn extract_name_value(ident: Ident, a: &TokenTree, b: &TokenTree) -> Option<Meta> {
let a = match *a {
TokenTree::Punct(ref o) => o,
@@ -256,6 +280,7 @@ impl Attribute {
}
}
+#[cfg(not(feature = "parsing"))]
fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> {
assert!(!tts.is_empty());
@@ -297,6 +322,7 @@ fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[Toke
}
}
+#[cfg(not(feature = "parsing"))]
fn list_of_nested_meta_items_from_tokens(
mut tts: &[TokenTree],
) -> Option<Punctuated<NestedMeta, Token![,]>> {
@@ -438,6 +464,43 @@ ast_enum_of_structs! {
}
}
+/// Conventional argument type associated with an invocation of an attribute
+/// macro.
+///
+/// For example if we are developing an attribute macro that is intended to be
+/// invoked on function items as follows:
+///
+/// ```edition2018
+/// # const IGNORE: &str = stringify! {
+/// #[my_attribute(path = "/v1/refresh")]
+/// # };
+/// pub fn refresh() {
+/// /* ... */
+/// }
+/// ```
+///
+/// The implementation of this macro would want to parse its attribute arguments
+/// as type `AttributeArgs`.
+///
+/// ```edition2018
+/// extern crate proc_macro;
+///
+/// use proc_macro::TokenStream;
+/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+///
+/// # const IGNORE: &str = stringify! {
+/// #[proc_macro_attribute]
+/// # };
+/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
+/// let args = parse_macro_input!(args as AttributeArgs);
+/// let input = parse_macro_input!(input as ItemFn);
+///
+/// /* ... */
+/// # "".parse().unwrap()
+/// }
+/// ```
+pub type AttributeArgs = Vec<NestedMeta>;
+
pub trait FilterAttrs<'a> {
type Ret: Iterator<Item = &'a Attribute>;
@@ -452,6 +515,7 @@ where
type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
fn outer(self) -> Self::Ret {
+ #[cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
fn is_outer(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Outer => true,
@@ -462,6 +526,7 @@ where
}
fn inner(self) -> Self::Ret {
+ #[cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
fn is_inner(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Inner(_) => true,
@@ -476,7 +541,8 @@ where
pub mod parsing {
use super::*;
- use parse::{ParseStream, Result};
+ use ext::IdentExt;
+ use parse::{Parse, ParseStream, Result};
#[cfg(feature = "full")]
use private;
@@ -510,6 +576,71 @@ pub mod parsing {
attrs
}
}
+
+ impl Parse for Meta {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let ident = input.call(Ident::parse_any)?;
+ parse_meta_after_ident(ident, input)
+ }
+ }
+
+ impl Parse for MetaList {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let ident = input.call(Ident::parse_any)?;
+ parse_meta_list_after_ident(ident, input)
+ }
+ }
+
+ impl Parse for MetaNameValue {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let ident = input.call(Ident::parse_any)?;
+ parse_meta_name_value_after_ident(ident, input)
+ }
+ }
+
+ impl Parse for NestedMeta {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let ahead = input.fork();
+
+ if ahead.peek(Lit) && !(ahead.peek(LitBool) && ahead.peek2(Token![=])) {
+ input.parse().map(NestedMeta::Literal)
+ } else if ahead.call(Ident::parse_any).is_ok() {
+ input.parse().map(NestedMeta::Meta)
+ } else {
+ Err(input.error("expected identifier or literal"))
+ }
+ }
+ }
+
+ pub fn parse_meta_after_ident(ident: Ident, input: ParseStream) -> Result<Meta> {
+ if input.peek(token::Paren) {
+ parse_meta_list_after_ident(ident, input).map(Meta::List)
+ } else if input.peek(Token![=]) {
+ parse_meta_name_value_after_ident(ident, input).map(Meta::NameValue)
+ } else {
+ Ok(Meta::Word(ident))
+ }
+ }
+
+ fn parse_meta_list_after_ident(ident: Ident, input: ParseStream) -> Result<MetaList> {
+ let content;
+ Ok(MetaList {
+ ident: ident,
+ paren_token: parenthesized!(content in input),
+ nested: content.parse_terminated(NestedMeta::parse)?,
+ })
+ }
+
+ fn parse_meta_name_value_after_ident(
+ ident: Ident,
+ input: ParseStream,
+ ) -> Result<MetaNameValue> {
+ Ok(MetaNameValue {
+ ident: ident,
+ eq_token: input.parse()?,
+ lit: input.parse()?,
+ })
+ }
}
#[cfg(feature = "printing")]
diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
index e8e8c1a8a8ae7..8c326451eb938 100644
--- a/third_party/rust/syn/src/buffer.rs
+++ b/third_party/rust/syn/src/buffer.rs
@@ -1,19 +1,11 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
//! A stably addressed token buffer supporting efficient traversal based on a
//! cheaply copyable cursor.
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
-// This module is heavily commented as it contains the only unsafe code in Syn,
-// and caution should be used when editing it. The public-facing interface is
-// 100% safe but the implementation is fragile internally.
+// This module is heavily commented as it contains most of the unsafe code in
+// Syn, and caution should be used when editing it. The public-facing interface
+// is 100% safe but the implementation is fragile internally.
#[cfg(all(
not(all(target_arch = "wasm32", target_os = "unknown")),
@@ -25,6 +17,7 @@ use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenS
use std::marker::PhantomData;
use std::ptr;
+use private;
use Lifetime;
/// Internal type which is used instead of `TokenTree` to represent a token tree
@@ -353,3 +346,21 @@ impl<'a> Cursor<'a> {
}
}
}
+
+impl private {
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn open_span_of_group(cursor: Cursor) -> Span {
+ match *cursor.entry() {
+ Entry::Group(ref group, _) => group.span_open(),
+ _ => cursor.span(),
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn close_span_of_group(cursor: Cursor) -> Span {
+ match *cursor.entry() {
+ Entry::Group(ref group, _) => group.span_close(),
+ _ => cursor.span(),
+ }
+ }
+}
diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
index 4f0986a3f1050..f54f5b9afe3ca 100644
--- a/third_party/rust/syn/src/data.rs
+++ b/third_party/rust/syn/src/data.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use punctuated::Punctuated;
diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
index 5c1bb17c6e10d..48ae7e46ee276 100644
--- a/third_party/rust/syn/src/derive.rs
+++ b/third_party/rust/syn/src/derive.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use punctuated::Punctuated;
diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
index 712b9b7d8260f..e07051d6e021c 100644
--- a/third_party/rust/syn/src/error.rs
+++ b/third_party/rust/syn/src/error.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use std;
use std::fmt::{self, Display};
use std::iter::FromIterator;
@@ -13,8 +5,14 @@ use std::iter::FromIterator;
use proc_macro2::{
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
};
+#[cfg(feature = "printing")]
+use quote::ToTokens;
+#[cfg(feature = "parsing")]
use buffer::Cursor;
+#[cfg(all(procmacro2_semver_exempt, feature = "parsing"))]
+use private;
+use thread::ThreadBound;
/// The result of a Syn parser.
pub type Result<T> = std::result::Result<T, Error>;
@@ -26,12 +24,23 @@ pub type Result = std::result::Result;
/// [module documentation]: index.html
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
-#[derive(Debug, Clone)]
+#[derive(Debug)]
pub struct Error {
- span: Span,
+ // Span is implemented as an index into a thread-local interner to keep the
+ // size small. It is not safe to access from a different thread. We want
+ // errors to be Send and Sync to play nicely with the Failure crate, so pin
+ // the span we're given to its original thread and assume it is
+ // Span::call_site if accessed from any other thread.
+ start_span: ThreadBound<Span>,
+ end_span: ThreadBound<Span>,
message: String,
}
+#[cfg(test)]
+struct _Test
+where
+ Error: Send + Sync;
+
impl Error {
/// Usually the [`ParseStream::error`] method will be used instead, which
/// automatically uses the correct span from the current position of the
@@ -44,12 +53,9 @@ impl Error {
///
/// # Example
///
- /// ```
- /// #[macro_use]
- /// extern crate syn;
- ///
- /// use syn::{Ident, LitStr};
- /// use syn::parse::{Error, ParseStream, Result};
+ /// ```edition2018
+ /// use syn::{Error, Ident, LitStr, Result, Token};
+ /// use syn::parse::ParseStream;
///
/// // Parses input that looks like `name = "string"` where the key must be
/// // the identifier `name` and the value may be any string literal.
@@ -65,18 +71,63 @@ impl Error {
/// let s: LitStr = input.parse()?;
/// Ok(s)
/// }
- /// #
- /// # fn main() {}
/// ```
pub fn new<T: Display>(span: Span, message: T) -> Self {
Error {
- span: span,
+ start_span: ThreadBound::new(span),
+ end_span: ThreadBound::new(span),
+ message: message.to_string(),
+ }
+ }
+
+ /// Creates an error with the specified message spanning the given syntax
+ /// tree node.
+ ///
+ /// Unlike the `Error::new` constructor, this constructor takes an argument
+ /// `tokens` which is a syntax tree node. This allows the resulting `Error`
+ /// to attempt to span all tokens inside of `tokens`. While you would
+ /// typically be able to use the `Spanned` trait with the above `Error::new`
+ /// constructor, implementation limitations today mean that
+ /// `Error::new_spanned` may provide a higher-quality error message on
+ /// stable Rust.
+ ///
+ /// When in doubt it's recommended to stick to `Error::new` (or
+ /// `ParseStream::error`)!
+ #[cfg(feature = "printing")]
+ pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
+ let mut iter = tokens.into_token_stream().into_iter();
+ let start = iter.next().map_or_else(Span::call_site, |t| t.span());
+ let end = iter.last().map_or(start, |t| t.span());
+ Error {
+ start_span: ThreadBound::new(start),
+ end_span: ThreadBound::new(end),
message: message.to_string(),
}
}
+ /// The source location of the error.
+ ///
+ /// Spans are not thread-safe so this function returns `Span::call_site()`
+ /// if called from a different thread than the one on which the `Error` was
+ /// originally created.
pub fn span(&self) -> Span {
- self.span
+ let start = match self.start_span.get() {
+ Some(span) => *span,
+ None => return Span::call_site(),
+ };
+
+ #[cfg(procmacro2_semver_exempt)]
+ {
+ let end = match self.end_span.get() {
+ Some(span) => *span,
+ None => return Span::call_site(),
+ };
+ start.join(end).unwrap_or(start)
+ }
+ #[cfg(not(procmacro2_semver_exempt))]
+ {
+ start
+ }
}
/// Render the error as an invocation of [`compile_error!`].
@@ -87,34 +138,46 @@ impl Error {
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
/// [`parse_macro_input!`]: ../macro.parse_macro_input.html
pub fn to_compile_error(&self) -> TokenStream {
+ let start = self
+ .start_span
+ .get()
+ .cloned()
+ .unwrap_or_else(Span::call_site);
+ let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+
// compile_error!($message)
TokenStream::from_iter(vec![
- TokenTree::Ident(Ident::new("compile_error", self.span)),
+ TokenTree::Ident(Ident::new("compile_error", start)),
TokenTree::Punct({
let mut punct = Punct::new('!', Spacing::Alone);
- punct.set_span(self.span);
+ punct.set_span(start);
punct
}),
TokenTree::Group({
let mut group = Group::new(Delimiter::Brace, {
TokenStream::from_iter(vec![TokenTree::Literal({
let mut string = Literal::string(&self.message);
- string.set_span(self.span);
+ string.set_span(end);
string
})])
});
- group.set_span(self.span);
+ group.set_span(end);
group
}),
])
}
}
+#[cfg(feature = "parsing")]
pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
if cursor.eof() {
Error::new(scope, format!("unexpected end of input, {}", message))
} else {
- Error::new(cursor.span(), message)
+ #[cfg(procmacro2_semver_exempt)]
+ let span = private::open_span_of_group(cursor);
+ #[cfg(not(procmacro2_semver_exempt))]
+ let span = cursor.span();
+ Error::new(span, message)
}
}
@@ -124,6 +187,22 @@ impl Display for Error {
}
}
+impl Clone for Error {
+ fn clone(&self) -> Self {
+ let start = self
+ .start_span
+ .get()
+ .cloned()
+ .unwrap_or_else(Span::call_site);
+ let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+ Error {
+ start_span: ThreadBound::new(start),
+ end_span: ThreadBound::new(end),
+ message: self.message.clone(),
+ }
+ }
+}
+
impl std::error::Error for Error {
fn description(&self) -> &str {
"parse error"
diff --git a/third_party/rust/syn/src/export.rs b/third_party/rust/syn/src/export.rs
index cc7a22245dd04..8e270bd017a54 100644
--- a/third_party/rust/syn/src/export.rs
+++ b/third_party/rust/syn/src/export.rs
@@ -8,6 +8,9 @@ pub use std::marker::Copy;
pub use std::option::Option::{None, Some};
pub use std::result::Result::{Err, Ok};
+#[cfg(feature = "printing")]
+pub extern crate quote;
+
pub use proc_macro2::{Span, TokenStream as TokenStream2};
pub use span::IntoSpans;
diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
index d4b689ad349b1..fe067cb4691a2 100644
--- a/third_party/rust/syn/src/expr.rs
+++ b/third_party/rust/syn/src/expr.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use proc_macro2::{Span, TokenStream};
use punctuated::Punctuated;
@@ -27,7 +19,7 @@ ast_enum_of_structs! {
/// This type is a syntax tree enum. In Syn this and other syntax tree enums
/// are designed to be traversed using the following rebinding idiom.
///
- /// ```
+ /// ```edition2018
/// # use syn::Expr;
/// #
/// # fn example(expr: Expr) {
@@ -60,7 +52,7 @@ ast_enum_of_structs! {
///
/// The pattern is similar if the input expression is borrowed:
///
- /// ```
+ /// ```edition2018
/// # use syn::Expr;
/// #
/// # fn example(expr: &Expr) {
@@ -74,7 +66,7 @@ ast_enum_of_structs! {
///
/// This approach avoids repeating the variant names twice on every line.
///
- /// ```
+ /// ```edition2018
/// # use syn::{Expr, ExprMethodCall};
/// #
/// # fn example(expr: Expr) {
@@ -89,7 +81,7 @@ ast_enum_of_structs! {
/// In general, the name to which a syntax tree enum variant is bound should
/// be a suitable name for the complete syntax tree enum type.
///
- /// ```
+ /// ```edition2018
/// # use syn::{Expr, ExprField};
/// #
/// # fn example(discriminant: &ExprField) {
@@ -768,9 +760,6 @@ ast_enum_of_structs! {
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
- // Clippy false positive
- // https://github.com/Manishearth/rust-clippy/issues/1241
- #[cfg_attr(feature = "cargo-clippy", allow(enum_variant_names))]
pub enum Pat {
/// A pattern that matches any value: `_`.
///
@@ -922,7 +911,7 @@ ast_struct! {
///
/// As in:
///
- /// ```rust
+ /// ```edition2018
/// # fn f() -> bool {
/// # let n = 0;
/// match n {
@@ -1897,7 +1886,7 @@ pub mod parsing {
let mut arms = Vec::new();
while !content.is_empty() {
- arms.push(content.call(match_arm)?);
+ arms.push(content.call(Arm::parse)?);
}
Ok(ExprMatch {
@@ -1933,52 +1922,6 @@ pub mod parsing {
})
}
- #[cfg(feature = "full")]
- fn match_arm(input: ParseStream) -> Result<Arm> {
- let requires_comma;
- Ok(Arm {
- attrs: input.call(Attribute::parse_outer)?,
- leading_vert: input.parse()?,
- pats: {
- let mut pats = Punctuated::new();
- let value: Pat = input.parse()?;
- pats.push_value(value);
- loop {
- if !input.peek(Token![|]) {
- break;
- }
- let punct = input.parse()?;
- pats.push_punct(punct);
- let value: Pat = input.parse()?;
- pats.push_value(value);
- }
- pats
- },
- guard: {
- if input.peek(Token![if]) {
- let if_token: Token![if] = input.parse()?;
- let guard: Expr = input.parse()?;
- Some((if_token, Box::new(guard)))
- } else {
- None
- }
- },
- fat_arrow_token: input.parse()?,
- body: {
- let body = input.call(expr_early)?;
- requires_comma = requires_terminator(&body);
- Box::new(body)
- },
- comma: {
- if requires_comma && !input.is_empty() {
- Some(input.parse()?)
- } else {
- input.parse()?
- }
- },
- })
- }
-
#[cfg(feature = "full")]
fn expr_closure(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprClosure> {
let asyncness: Option<Token![async]> = input.parse()?;
@@ -2348,12 +2291,9 @@ pub mod parsing {
///
/// # Example
///
- /// ```
- /// #[macro_use]
- /// extern crate syn;
- ///
- /// use syn::{token, Attribute, Block, Ident, Stmt};
- /// use syn::parse::{Parse, ParseStream, Result};
+ /// ```edition2018
+ /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
+ /// use syn::parse::{Parse, ParseStream};
///
/// // Parse a function with no generics or parameter list.
/// //
@@ -2394,8 +2334,6 @@ pub mod parsing {
/// })
/// }
/// }
- /// #
- /// # fn main() {}
/// ```
pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
let mut stmts = Vec::new();
@@ -2570,23 +2508,28 @@ pub mod parsing {
|| input.peek2(Token![!])
|| input.peek2(token::Brace)
|| input.peek2(token::Paren)
- || input.peek2(Token![..]) && !{
- let ahead = input.fork();
- ahead.parse::<Ident>()?;
- ahead.parse::<RangeLimits>()?;
- ahead.is_empty() || ahead.peek(Token![,])
- }
+ || input.peek2(Token![..])
+ && !{
+ let ahead = input.fork();
+ ahead.parse::<Ident>()?;
+ ahead.parse::<RangeLimits>()?;
+ ahead.is_empty() || ahead.peek(Token![,])
+ }
})
+ || input.peek(Token![self]) && input.peek2(Token![::])
|| input.peek(Token![::])
|| input.peek(Token![<])
- || input.peek(Token![self])
|| input.peek(Token![Self])
|| input.peek(Token![super])
|| input.peek(Token![extern])
|| input.peek(Token![crate])
{
pat_path_or_macro_or_struct_or_range(input)
- } else if input.peek(Token![ref]) || input.peek(Token![mut]) || input.peek(Ident) {
+ } else if input.peek(Token![ref])
+ || input.peek(Token![mut])
+ || input.peek(Token![self])
+ || input.peek(Ident)
+ {
input.call(pat_ident).map(Pat::Ident)
} else if lookahead.peek(token::Paren) {
input.call(pat_tuple).map(Pat::Tuple)
@@ -2782,6 +2725,54 @@ pub mod parsing {
}
}
+ #[cfg(feature = "full")]
+ impl Parse for Arm {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+ leading_vert: input.parse()?,
+ pats: {
+ let mut pats = Punctuated::new();
+ let value: Pat = input.parse()?;
+ pats.push_value(value);
+ loop {
+ if !input.peek(Token![|]) {
+ break;
+ }
+ let punct = input.parse()?;
+ pats.push_punct(punct);
+ let value: Pat = input.parse()?;
+ pats.push_value(value);
+ }
+ pats
+ },
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+ let guard: Expr = input.parse()?;
+ Some((if_token, Box::new(guard)))
+ } else {
+ None
+ }
+ },
+ fat_arrow_token: input.parse()?,
+ body: {
+ let body = input.call(expr_early)?;
+ requires_comma = requires_terminator(&body);
+ Box::new(body)
+ },
+ comma: {
+ if requires_comma && !input.is_empty() {
+ Some(input.parse()?)
+ } else {
+ input.parse()?
+ }
+ },
+ })
+ }
+ }
+
impl Parse for Index {
fn parse(input: ParseStream) -> Result<Self> {
let lit: LitInt = input.parse()?;
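
Note on the expr.rs hunks above: the private `match_arm` helper became a public `impl Parse for Arm`, so match arms can now be parsed through the normal entry points. A small sketch of that usage; illustrative only, and it assumes syn is built with the "full" feature:

    use syn::{Arm, Result};

    fn demo() -> Result<()> {
        // A single match arm, including its guard and trailing comma.
        let arm: Arm = syn::parse_str("Some(x) if x > 0 => x * 2,")?;
        assert_eq!(arm.pats.len(), 1);
        assert!(arm.guard.is_some());
        Ok(())
    }
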
diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
index ae6c2aeec8dd2..ca6d70d2aa9e3 100644
--- a/third_party/rust/syn/src/ext.rs
+++ b/third_party/rust/syn/src/ext.rs
@@ -17,13 +17,10 @@ pub trait IdentExt: Sized + private::Sealed {
/// This is useful when parsing a DSL which allows Rust keywords as
/// identifiers.
///
- /// ```rust
- /// #[macro_use]
- /// extern crate syn;
- ///
- /// use syn::Ident;
+ /// ```edition2018
+ /// use syn::{Error, Ident, Result, Token};
/// use syn::ext::IdentExt;
- /// use syn::parse::{Error, ParseStream, Result};
+ /// use syn::parse::ParseStream;
///
/// // Parses input that looks like `name = NAME` where `NAME` can be
/// // any identifier.
@@ -41,8 +38,6 @@ pub trait IdentExt: Sized + private::Sealed {
/// let name = input.call(Ident::parse_any)?;
/// Ok(name)
/// }
- /// #
- /// # fn main() {}
/// ```
fn parse_any(input: ParseStream) -> Result<Self>;
}
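
The ext.rs hunk above only modernizes the `IdentExt::parse_any` doc test, but the pattern it documents bears a compact restatement: `parse_any` accepts keywords where a plain `Ident` parse would error. Sketch (illustrative; the wrapper type is made up and assumes the "parsing" feature):

    use syn::ext::IdentExt;
    use syn::parse::{Parse, ParseStream};
    use syn::{Ident, Result};

    // Wrapper whose Parse impl accepts keywords as names.
    struct AnyName(Ident);

    impl Parse for AnyName {
        fn parse(input: ParseStream) -> Result<Self> {
            Ok(AnyName(input.call(Ident::parse_any)?))
        }
    }

    fn demo() -> Result<()> {
        // A plain `syn::parse_str::<Ident>("type")` would reject the keyword.
        let AnyName(ident) = syn::parse_str("type")?;
        assert_eq!(ident, "type");
        Ok(())
    }
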
diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
index 1f6054ae4d3ad..30defe7df1051 100644
--- a/third_party/rust/syn/src/file.rs
+++ b/third_party/rust/syn/src/file.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
ast_struct! {
@@ -18,9 +10,7 @@ ast_struct! {
/// Parse a Rust source file into a `syn::File` and print out a debug
/// representation of the syntax tree.
///
- /// ```
- /// # extern crate syn;
- /// #
+ /// ```edition2018
/// use std::env;
/// use std::fs::File;
/// use std::io::Read;
diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
index 2b65e2e55b2b9..3eb18753e9bf6 100644
--- a/third_party/rust/syn/src/gen/fold.rs
+++ b/third_party/rust/syn/src/gen/fold.rs
@@ -1,7 +1,6 @@
// THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
#![allow(unreachable_code)]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
#[cfg(any(feature = "full", feature = "derive"))]
use gen::helper::fold::*;
use proc_macro2::Span;
diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
index ec84cd2748d97..1118fc914fb5c 100644
--- a/third_party/rust/syn/src/gen/visit.rs
+++ b/third_party/rust/syn/src/gen/visit.rs
@@ -1,6 +1,6 @@
// THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
-#![cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
+#![cfg_attr(feature = "cargo-clippy", allow(trivially_copy_pass_by_ref))]
#[cfg(any(feature = "full", feature = "derive"))]
use gen::helper::visit::*;
use proc_macro2::Span;
diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
index 4668c9b8e0343..ce875f5c8e342 100644
--- a/third_party/rust/syn/src/gen/visit_mut.rs
+++ b/third_party/rust/syn/src/gen/visit_mut.rs
@@ -1,6 +1,5 @@
// THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
-#![cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
#[cfg(any(feature = "full", feature = "derive"))]
use gen::helper::visit_mut::*;
use proc_macro2::Span;
diff --git a/third_party/rust/syn/src/gen_helper.rs b/third_party/rust/syn/src/gen_helper.rs
index fe00c8f78ecc7..b15b42be18cb6 100644
--- a/third_party/rust/syn/src/gen_helper.rs
+++ b/third_party/rust/syn/src/gen_helper.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
#[cfg(feature = "fold")]
pub mod fold {
use fold::Fold;
diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
index 51c60802ee972..59c7c4523b4e6 100644
--- a/third_party/rust/syn/src/generics.rs
+++ b/third_party/rust/syn/src/generics.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use punctuated::{Iter, IterMut, Punctuated};
@@ -300,14 +292,9 @@ impl Generics {
/// Split a type's generics into the pieces required for impl'ing a trait
/// for that type.
///
- /// ```
- /// # #[macro_use]
- /// # extern crate quote;
- /// #
- /// # extern crate proc_macro2;
- /// # extern crate syn;
- /// #
+ /// ```edition2018
/// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
/// #
/// # fn main() {
/// # let generics: syn::Generics = Default::default();
@@ -490,19 +477,13 @@ pub mod parsing {
impl Parse for Generics {
fn parse(input: ParseStream) -> Result<Self> {
- let mut params = Punctuated::new();
-
if !input.peek(Token![<]) {
- return Ok(Generics {
- lt_token: None,
- params: params,
- gt_token: None,
- where_clause: None,
- });
+ return Ok(Generics::default());
}
let lt_token: Token![<] = input.parse()?;
+ let mut params = Punctuated::new();
let mut has_type_param = false;
loop {
if input.peek(Token![>]) {
@@ -658,7 +639,10 @@ pub mod parsing {
let mut bounds = Punctuated::new();
if has_colon {
loop {
- if input.peek(Token![,]) || input.peek(Token![>]) {
+ if input.peek(Token![,])
+ || input.peek(Token![>])
+ || input.peek(Token![=])
+ {
break;
}
let value = input.parse()?;
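
Note on the generics.rs hunk just above: breaking the bound loop on `Token![=]` is what lets a bounded type parameter that also carries a default, e.g. `T: Clone = String`, parse. A quick sketch of the observable effect; illustrative only, assuming the "derive" and "parsing" features:

    use syn::{DeriveInput, GenericParam, Result};

    fn demo() -> Result<()> {
        let input: DeriveInput = syn::parse_str("struct Pair<T: Clone = String>(T, T);")?;
        match input.generics.params.iter().next().unwrap() {
            GenericParam::Type(param) => {
                assert_eq!(param.bounds.len(), 1);
                assert!(param.default.is_some());
            }
            _ => unreachable!(),
        }
        Ok(())
    }
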
diff --git a/third_party/rust/syn/src/group.rs b/third_party/rust/syn/src/group.rs
index 84a19fdff50c1..272e435f242cb 100644
--- a/third_party/rust/syn/src/group.rs
+++ b/third_party/rust/syn/src/group.rs
@@ -74,9 +74,13 @@ impl private {
fn parse_delimited(input: ParseStream, delimiter: Delimiter) -> Result<(Span, ParseBuffer)> {
input.step(|cursor| {
if let Some((content, span, rest)) = cursor.group(delimiter) {
- let unexpected = private::get_unexpected(input);
+ #[cfg(procmacro2_semver_exempt)]
+ let scope = private::close_span_of_group(*cursor);
+ #[cfg(not(procmacro2_semver_exempt))]
+ let scope = span;
let nested = private::advance_step_cursor(cursor, content);
- let content = private::new_parse_buffer(span, nested, unexpected);
+ let unexpected = private::get_unexpected(input);
+ let content = private::new_parse_buffer(scope, nested, unexpected);
Ok(((span, content), rest))
} else {
let message = match delimiter {
@@ -94,15 +98,11 @@ fn parse_delimited(input: ParseStream, delimiter: Delimiter) -> Result<(Span, Pa
///
/// # Example
///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
/// #
-/// #[macro_use]
-/// extern crate syn;
-///
-/// use syn::{token, Ident, Type};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{parenthesized, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified tuple struct syntax like:
@@ -155,14 +155,11 @@ macro_rules! parenthesized {
///
/// # Example
///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
/// #
-/// #[macro_use]
-/// extern crate syn;
-/// use syn::{token, Ident, Type};
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{braced, token, Ident, Result, Token, Type};
+/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified struct syntax like:
@@ -236,18 +233,12 @@ macro_rules! braced {
///
/// # Example
///
-/// ```rust
-/// # #[macro_use]
-/// # extern crate quote;
+/// ```edition2018
+/// # use quote::quote;
/// #
-/// #[macro_use]
-/// extern crate syn;
-///
-/// extern crate proc_macro2;
-///
/// use proc_macro2::TokenStream;
-/// use syn::token;
-/// use syn::parse::{Parse, ParseStream, Result};
+/// use syn::{bracketed, token, Result, Token};
+/// use syn::parse::{Parse, ParseStream};
///
/// // Parse an outer attribute like:
/// //
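
The group.rs doc hunks above switch the `parenthesized!`/`braced!`/`bracketed!` examples to edition-2018 imports. For orientation, a condensed sketch of the pattern those docs demonstrate; the type and field names are illustrative, and it assumes the "parsing" feature:

    use syn::parse::{Parse, ParseStream};
    use syn::{parenthesized, token, Ident, LitInt, Result};

    // Parses input shaped like `name(42)`.
    struct Call {
        name: Ident,
        paren_token: token::Paren,
        arg: LitInt,
    }

    impl Parse for Call {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(Call {
                name: input.parse()?,
                // parenthesized! consumes the `(...)` group and exposes its
                // interior through `content` for further parsing.
                paren_token: parenthesized!(content in input),
                arg: content.parse()?,
            })
        }
    }
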
diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
index 9018f554498c1..78709bb8191b3 100644
--- a/third_party/rust/syn/src/item.rs
+++ b/third_party/rust/syn/src/item.rs
@@ -1,11 +1,3 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
use super::*;
use derive::{Data, DeriveInput};
use proc_macro2::TokenStream;
@@ -770,6 +762,8 @@ pub mod parsing {
use ext::IdentExt;
use parse::{Parse, ParseStream, Result};
+ use proc_macro2::{Punct, Spacing, TokenTree};
+ use std::iter::FromIterator;
impl Parse for Item {
fn parse(input: ParseStream) -> Result<Self> {
@@ -807,7 +801,7 @@ pub mod parsing {
} else if lookahead.peek(Token![const]) {
ahead.parse::<Token![const]>()?;
let lookahead = ahead.lookahead1();
- if lookahead.peek(Ident) {
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
input.parse().map(Item::Const)
} else if lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![async])
@@ -849,9 +843,9 @@ pub mod parsing {
input.parse().map(Item::Enum)
} else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
input.parse().map(Item::Union)
- } else if lookahead.peek(Token![trait])
- || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
- {
+ } else if lookahead.peek(Token![trait]) {
+ input.call(parse_trait_or_trait_alias)
+ } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
input.parse().map(Item::Trait)
} else if lookahead.peek(Token![impl ])
|| lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
@@ -903,17 +897,54 @@ pub mod parsing {
// TODO: figure out the actual grammar; is body required to be braced?
impl Parse for ItemMacro2 {
fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let macro_token: Token![macro] = input.parse()?;
+ let ident: Ident = input.parse()?;
+
+ let paren_token;
let args;
+ let brace_token;
let body;
+ let lookahead = input.lookahead1();
+ if lookahead.peek(token::Paren) {
+ let paren_content;
+ paren_token = parenthesized!(paren_content in input);
+ args = paren_content.parse()?;
+
+ let brace_content;
+ brace_token = braced!(brace_content in input);
+ body = brace_content.parse()?;
+ } else if lookahead.peek(token::Brace) {
+ // Hack: the ItemMacro2 syntax tree will need to change so that
+ // we can store None for the args.
+ //
+ // https://github.com/dtolnay/syn/issues/548
+ //
+ // For now, store some sentinel tokens that are otherwise
+ // illegal.
+ paren_token = token::Paren::default();
+ args = TokenStream::from_iter(vec![
+ TokenTree::Punct(Punct::new('$', Spacing::Alone)),
+ TokenTree::Punct(Punct::new('$', Spacing::Alone)),
+ ]);
+
+ let brace_content;
+ brace_token = braced!(brace_content in input);
+ body = brace_content.parse()?;
+ } else {
+ return Err(lookahead.error());
+ }
+
Ok(ItemMacro2 {
- attrs: input.call(Attribute::parse_outer)?,
- vis: input.parse()?,
- macro_token: input.parse()?,
- ident: input.parse()?,
- paren_token: parenthesized!(args in input),
- args: args.parse()?,
- brace_token: braced!(body in input),
- body: body.parse()?,
+ attrs: attrs,
+ vis: vis,
+ macro_token: macro_token,
+ ident: ident,
+ paren_token: paren_token,
+ args: args,
+ brace_token: brace_token,
+ body: body,
})
}
}
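
A note on the ItemMacro2 hunk directly above: the parser now accepts both shapes of a 2.0-style `macro` item, storing the sentinel `$$` args for the braces-only form as described in the comment. Sketch of the two inputs it accepts; illustrative only, assuming the "full" feature:

    use syn::{ItemMacro2, Result};

    fn demo() -> Result<()> {
        // Parenthesized-args form: args and body are captured separately.
        let _short: ItemMacro2 = syn::parse_str("macro m($e:expr) { $e + 1 }")?;
        // Braces-only form: previously rejected, now parsed with sentinel args.
        let _rules: ItemMacro2 = syn::parse_str("macro m { ($e:expr) => { $e + 1 } }")?;
        Ok(())
    }
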
@@ -925,7 +956,13 @@ pub mod parsing {
vis: input.parse()?,
extern_token: input.parse()?,
crate_token: input.parse()?,
- ident: input.parse()?,
+ ident: {
+ if input.peek(Token![self]) {
+ input.call(Ident::parse_any)?
+ } else {
+ input.parse()?
+ }
+ },
rename: {
if input.peek(Token![as]) {
let as_token: Token![as] = input.parse()?;
@@ -1023,7 +1060,14 @@ pub mod parsing {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
const_token: input.parse()?,
- ident: input.parse()?,
+ ident: {
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ }
+ },
colon_token: input.parse()?,
ty: input.parse()?,
eq_token: input.parse()?,
@@ -1438,6 +1482,34 @@ pub mod parsing {
}
}
+ fn parse_trait_or_trait_alias(input: ParseStream) -> Result<Item> {
+ let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+ let lookahead = input.lookahead1();
+ if lookahead.peek(token::Brace)
+ || lookahead.peek(Token![:])
+ || lookahead.peek(Token![where])
+ {
+ let unsafety = None;
+ let auto_token = None;
+ parse_rest_of_trait(
+ input,
+ attrs,
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ )
+ .map(Item::Trait)
+ } else if lookahead.peek(Token![=]) {
+ parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+ .map(Item::TraitAlias)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
impl Parse for ItemTrait {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
@@ -1446,83 +1518,123 @@ pub mod parsing {
let auto_token: Option<Token![auto]> = input.parse()?;
let trait_token: Token![trait] = input.parse()?;
let ident: Ident = input.parse()?;
- let mut generics: Generics = input.parse()?;
- let colon_token: Option<Token![:]> = input.parse()?;
-
- let mut supertraits = Punctuated::new();
- if colon_token.is_some() {
- loop {
- supertraits.push_value(input.parse()?);
- if input.peek(Token![where]) || input.peek(token::Brace) {
- break;
- }
- supertraits.push_punct(input.parse()?);
- if input.peek(Token![where]) || input.peek(token::Brace) {
- break;
- }
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+ attrs,
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ )
+ }
+ }
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+ attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+ trait_token: Token![trait],
+ ident: Ident,
+ mut generics: Generics,
+ ) -> Result<ItemTrait> {
+ let colon_token: Option<Token![:]> = input.parse()?;
+
+ let mut supertraits = Punctuated::new();
+ if colon_token.is_some() {
+ loop {
+ supertraits.push_value(input.parse()?);
+ if input.peek(Token![where]) || input.peek(token::Brace) {
+ break;
+ }
+ supertraits.push_punct(input.parse()?);
+ if input.peek(Token![where]) || input.peek(token::Brace) {
+ break;
}
}
+ }
- generics.where_clause = input.parse()?;
+ generics.where_clause = input.parse()?;
- let content;
- let brace_token = braced!(content in input);
- let mut items = Vec::new();
- while !content.is_empty() {
- items.push(content.parse()?);
- }
-
- Ok(ItemTrait {
- attrs: attrs,
- vis: vis,
- unsafety: unsafety,
- auto_token: auto_token,
- trait_token: trait_token,
- ident: ident,
- generics: generics,
- colon_token: colon_token,
- supertraits: supertraits,
- brace_token: brace_token,
- items: items,
- })
+ let content;
+ let brace_token = braced!(content in input);
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
}
+
+ Ok(ItemTrait {
+ attrs: attrs,
+ vis: vis,
+ unsafety: unsafety,
+ auto_token: auto_token,
+ trait_token: trait_token,
+ ident: ident,
+ generics: generics,
+ colon_token: colon_token,
+ supertraits: supertraits,
+ brace_token: brace_token,
+ items: items,
+ })
}
impl Parse for ItemTraitAlias {
fn parse(input: ParseStream) -> Result<Self> {
- let attrs = input.call(Attribute::parse_outer)?;
- let vis: Visibility = input.parse()?;
- let trait_token: Token![trait] = input.parse()?;
- let ident: Ident = input.parse()?;
- let mut generics: Generics = input.parse()?;
- let eq_token: Token![=] = input.parse()?;
-
- let mut bounds = Punctuated::new();
- loop {
- if input.peek(Token![where]) || input.peek(Token![;]) {
- break;
- }
- bounds.push_value(input.parse()?);
- if input.peek(Token![where]) || input.peek(Token![;]) {
- break;
- }
- bounds.push_punct(input.parse()?);
+ let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
+ parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
+ }
+ }
+
+ fn parse_start_of_trait_alias(
+ input: ParseStream,
+ ) -> Result<(Vec<Attribute>, Visibility, Token![trait], Ident, Generics)> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let trait_token: Token![trait] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let generics: Generics = input.parse()?;
+ Ok((attrs, vis, trait_token, ident, generics))
+ }
+
+ fn parse_rest_of_trait_alias(
+ input: ParseStream,
+ attrs: Vec<Attribute>,
+ vis: Visibility,
+ trait_token: Token![trait],
+ ident: Ident,
+ mut generics: Generics,
+ ) -> Result<ItemTraitAlias> {
+ let eq_token: Token![=] = input.parse()?;
+
+ let mut bounds = Punctuated::new();
+ loop {
+ if input.peek(Token![where]) || input.peek(Token![;]) {
+ break;
}
+ bounds.push_value(input.parse()?);
+ if input.peek(Token![where]) || input.peek(Token![;]) {
+ break;
+ }
+ bounds.push_punct(input.parse()?);
+ }
- generics.where_clause = input.parse()?;
- let semi_token: Token![;] = input.parse()?;
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
- Ok(ItemTraitAlias {
- attrs: attrs,
- vis: vis,
- trait_token: trait_token,
- ident: ident,
- generics: generics,
- eq_token: eq_token,
- bounds: bounds,
- semi_token: semi_token,
- })
- }
+ Ok(ItemTraitAlias {
+ attrs: attrs,
+ vis: vis,
+ trait_token: trait_token,
+ ident: ident,
+ generics: generics,
+ eq_token: eq_token,
+ bounds: bounds,
+ semi_token: semi_token,
+ })
}
impl Parse for TraitItem {
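
Note on the refactor above: `trait` items now flow through a shared prefix parser and are routed to `ItemTrait` or `ItemTraitAlias` based on the token that follows the generics (`{`, `:` or `where` versus `=`). A small sketch of the observable behavior; illustrative only, assuming the "full" feature:

    use syn::{Item, Result};

    fn demo() -> Result<()> {
        match syn::parse_str::<Item>("trait Shape: Clone {}")? {
            Item::Trait(_) => {}
            _ => panic!("expected a trait definition"),
        }
        match syn::parse_str::<Item>("trait Threaded = Send + Sync;")? {
            Item::TraitAlias(_) => {}
            _ => panic!("expected a trait alias"),
        }
        Ok(())
    }
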
@@ -1712,7 +1824,20 @@ pub mod parsing {
let defaultness: Option<Token![default]> = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let impl_token: Token![impl ] = input.parse()?;
- let generics: Generics = input.parse()?;
+
+ let has_generics = input.peek(Token![<])
+ && (input.peek2(Token![>])
+ || input.peek2(Token![#])
+ || (input.peek2(Ident) || input.peek2(Lifetime))
+ && (input.peek3(Token![:])
+ || input.peek3(Token![,])
+ || input.peek3(Token![>])));
+ let generics: Generics = if has_generics {
+ input.parse()?
+ } else {
+ Generics::default()
+ };
+
let trait_ = {
let ahead = input.fork();
if ahead.parse::