-
Notifications
You must be signed in to change notification settings - Fork 6
/
index.html
519 lines (343 loc) · 68.1 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
<head>
<!--
  The two inline scripts below are vendored, MINIFIED third-party bundles.
  Do not hand-edit; if updating, replace wholesale from the upstream provider.

  1) Dimensions badge embed (webpack bundle): on DOMContentLoaded it injects
     badge.css from badge.dimensions.ai, scans the DOM for elements carrying
     class "__dimensions_badge_embed__" (configured via data-doi / data-id /
     data-pmid / data-style / data-legend / data-hide-zero-citations
     attributes), fetches citation metrics from metrics-api.dimensions.ai,
     and renders SVG citation badges linking to the Dimensions detail page.
     It also exposes window.__dimensions_embed.addBadges for re-scanning.

  2) Altmetric badge loader: removes any previous copy of itself, then
     injects the altmetric_badges script (id "altmetric-embed-js") from the
     Altmetric CDN into <head>/<body>.
-->
<script type='text/javascript' charset="utf-8">!function(e){function n(i){if(t[i])return t[i].exports;var s=t[i]={i:i,l:!1,exports:{}};return e[i].call(s.exports,s,s.exports,n),s.l=!0,s.exports}var t={};n.m=e,n.c=t,n.d=function(e,t,i){n.o(e,t)||Object.defineProperty(e,t,{configurable:!1,enumerable:!0,get:i})},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},n.p="",n(n.s=0)}([function(e,n,t){"use strict";Object.defineProperty(n,"__esModule",{value:!0});var i=t(1),s=(t.n(i),t(2)),r=t(6),o=t(8);if(!window.__dimensions_embed){var a=function(){window.__dimensions_detail_url||(window.__dimensions_detail_url="https://badge.dimensions.ai"),window.__dimensions_metrics_api||(window.__dimensions_metrics_api="https://metrics-api.dimensions.ai"),window.__dimensions_badge_server||(window.__dimensions_badge_server="https://badge.dimensions.ai"),window.__dimensions_badge_url||(window.__dimensions_badge_url="https://badge.dimensions.ai")},c=function(){var e=document.querySelectorAll(".__dimensions_badge_embed__");if(e.length)for(var n=0;n<e.length;++n)!function(n){var t=new r.a(e[n].attributes,window.__dimensions_detail_url,window.__dimensions_metrics_api,window.__dimensions_badge_server);new o.a(t.metricsURL).fetch(function(i){new s.a(e[n],t,i).load()})}(n)},d=function(){var e=document.createElement("link");e.rel="stylesheet",e.href=window.__dimensions_badge_url+"/badge.css",document.getElementsByTagName("head")[0].appendChild(e)},l=function(){window.__dimensions_embed_installed__||(window.__dimensions_embed_installed__=!0,a(),d(),c())};(document.attachEvent?"complete"===document.readyState:"loading"!==document.readyState)?l():(document.addEventListener("DOMContentLoaded",l),document.onreadystatechange=function(){"complete"===document.readyState&&l()}),window.__dimensions_embed={addBadges:c}}},function(e,n){},function(e,n,t){"use strict";function 
i(e,n){if(!(e instanceof n))throw new TypeError("Cannot call a class as a function")}var s=t(3),r=t.n(s),o=t(4),a=t.n(o),c=t(5),d=function(){function e(e,n){for(var t=0;t<n.length;t++){var i=n[t];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}return function(n,t,i){return t&&e(n.prototype,t),i&&e(n,i),n}}(),l=function(){function e(n,t,s){var r=this;i(this,e),Object.defineProperty(this,"onMouseOver",{enumerable:!0,writable:!0,value:function(){if(r.badgeComponent.className="__dimensions_Badge __dimensions_Badge_style_"+r.config.badgeType+" __dimensions_Badge_hover",r.legendComponent)switch(r.config.legendDisplay){case"always":case"never":return;case"hover-top":case"hover-right":case"hover-bottom":case"hover-left":r.legendComponent.className="__dimensions_Badge_Legend_padding __dimensions_Badge_Legend_hover __dimensions_Badge_Legend_"+r.config.legendDisplay+" __dimensions_Badge_Legend_style_"+r.config.badgeType;break;case"hover-auto":r.legendComponent.className="__dimensions_Badge_Legend_padding __dimensions_Badge_Legend_hover __dimensions_Badge_Legend_hover-"+r.optimalHoverDirection+" __dimensions_Badge_Legend_style_"+r.config.badgeType}}}),Object.defineProperty(this,"onMouseOut",{enumerable:!0,writable:!0,value:function(){if(r.badgeComponent.className="__dimensions_Badge __dimensions_Badge_style_"+r.config.badgeType,r.legendComponent)switch(r.config.legendDisplay){case"always":case"never":return;default:r.legendComponent.className="__dimensions_Badge_Legend_padding __dimensions_Badge_Legend_style_"+r.config.badgeType}}}),Object.defineProperty(this,"trigger",{enumerable:!0,writable:!0,value:function(e){window.setTimeout(function(){var n=document.createEvent("Event");n.initEvent(e,!0,!0),r.component.dispatchEvent(n)})}}),this.component=n,this.config=t,this.metrics=s,this.id=Math.random().toString(36).substring(7)}return 
d(e,[{key:"load",value:function(){if(this.supportsSVG()&&!this.alreadyInstalled&&!this.noIdentifiers){if(this.hidden)return void this.trigger("dimensions_embed:hide");this.buildElements(),this.installElements(),this.trigger("dimensions_embed:show")}}},{key:"supportsSVG",value:function(){return!!("createElementNS"in document&&document.createElementNS("http://www.w3.org/2000/svg","svg").createSVGRect)&&this.testGradient()}},{key:"testGradient",value:function(){var e=document.createElement("temp");return e.style.cssText="background-image:linear-gradient(black,white),radial-gradient(black,white)",""!==e.style.backgroundImage}},{key:"buildElements",value:function(){this.wrapperComponent=this.buildWrapper(),this.badgeComponent=this.buildBadge(),this.legendComponent=this.buildLegend()}},{key:"installElements",value:function(){this.badgeComponent&&this.wrapperComponent.appendChild(this.badgeComponent),this.legendComponent&&this.wrapperComponent.appendChild(this.legendComponent),this.component.appendChild(this.wrapperComponent),this.component.setAttribute("data-dimensions-badge-installed",!0)}},{key:"buildWrapper",value:function(){var e=this,n=document.createElement("a");return n.className="__dimensions_Link",n.href=this.config.detailPageURL,n.target="_blank",n.style.display="none",n.setAttribute("referrerpolicy","no-referrer-when-downgrade"),n.addEventListener("focus",this.onMouseOver),n.addEventListener("blur",this.onMouseOut),n.addEventListener("mouseover",this.onMouseOver),n.addEventListener("mouseout",this.onMouseOut),window.addEventListener("touchstart",function(){n.removeEventListener("mouseover",e.onMouseOver),n.removeEventListener("mouseout",e.onMouseOut),n.removeEventListener("focus",e.onMouseOver),n.removeEventListener("blur",e.onMouseOut)}),n}},{key:"buildBadge",value:function(){var 
e=this.template,n=1===this.metrics.timesCited?"CITATION":"CITATIONS",t=this.textContent.length>3?"small":"normal",i=e.replace(/\{\{SCORE\}\}/g,this.textContent).replace(/\{\{SIZE\}\}/g,t).replace(/\{\{LABEL\}\}/g,n).replace(/\{\{ID\}\}/g,this.id).replace(/\{\{BADGE_SERVER_URL\}\}/g,this.config.badgeServerURL).replace(/\{\{ALT_SCORE\}\}/g,this.altScoreText),s=document.createElement("div");return s.className="__dimensions_Badge __dimensions_Badge_style_"+this.config.badgeType,s.innerHTML=i,s}},{key:"buildLegend",value:function(){if(!this.legendHidden){var e=document.createElement("div");e.className="__dimensions_Badge_Legend_padding __dimensions_Badge_Legend_hover-right __dimensions_Badge_Legend_style_"+this.config.badgeType,this.legendAlwaysVisible&&(e.className+=" __dimensions_Badge_Legend_always");var n=document.createElement("div");return n.className="__dimensions_Badge_Legend",e.appendChild(n),n.innerHTML="\n <div class='__dimensions_Badge_stat_group __dimensions_Badge_stat_group_citations'>\n <div class='__dimensions_Badge_stat __dimensions_Badge_stat_total_citations'>\n <span class='__dimensions_Badge_stat_icon'></span>\n <span class='__dimensions_Badge_stat_count'>"+Object(c.b)(this.timesCited)+"</span>\n <span class='__dimensions_Badge_stat_text'>Total "+(1==this.timesCited?"citation":"citations")+"</span>\n </div>\n <div class='__dimensions_Badge_stat __dimensions_Badge_stat_recent_citations'>\n <span class='__dimensions_Badge_stat_icon'></span>\n <span class='__dimensions_Badge_stat_count'>"+Object(c.b)(this.recentCitations)+"</span>\n <span class='__dimensions_Badge_stat_text'>Recent "+(1==this.recentCitations?"citation":"citations")+"</span>\n </div>\n </div>\n <div class=\"__dimensions_Badge_stat_group __dimensions_Badge_stat_group_cr\">\n <div class='__dimensions_Badge_stat __dimensions_Badge_stat_fcr'>\n <span class='__dimensions_Badge_stat_icon'></span>\n <span class='__dimensions_Badge_stat_count'>"+Object(c.a)(this.metrics.fcr)+"</span>\n <span 
class='__dimensions_Badge_stat_text'>Field Citation Ratio</span>\n </div>\n <div class='__dimensions_Badge_stat __dimensions_Badge_stat_rcr'>\n <span class='__dimensions_Badge_stat_icon'></span>\n <span class='__dimensions_Badge_stat_count'>"+Object(c.a)(this.metrics.rcr)+"</span>\n <span class='__dimensions_Badge_stat_text'>Relative Citation Ratio</span>\n </div>\n </div>",e}}},{key:"template",get:function(){switch(this.config.badgeType){case"small_circle":case"medium_circle":case"large_circle":default:return r.a;case"small_rectangle":case"large_rectangle":return a.a}}},{key:"hidden",get:function(){return this.config.hideWhenZeroCitations&&(this.noMetrics||this.noCitations)}},{key:"alreadyInstalled",get:function(){return this.component.hasAttribute("data-dimensions-badge-installed")}},{key:"textContent",get:function(){return this.noMetrics?"?":Object(c.b)(this.metrics.timesCited)}},{key:"noCitations",get:function(){return this.metrics.timesCited<1}},{key:"noMetrics",get:function(){return!this.metrics.exists}},{key:"noIdentifiers",get:function(){return!this.config.hasAnyIdentifiers}},{key:"legendHidden",get:function(){return"never"===this.config.legendDisplay}},{key:"legendAlwaysVisible",get:function(){return"always"===this.config.legendDisplay}},{key:"timesCited",get:function(){return this.noMetrics?"?":Object(c.b)(this.metrics.timesCited)}},{key:"recentCitations",get:function(){return this.noMetrics?"?":Object(c.b)(this.metrics.recentCitations)}},{key:"optimalHoverDirection",get:function(){return this.badgeComponent.getBoundingClientRect().x>window.innerWidth/2?"left":"right"}},{key:"altScoreText",get:function(){return 1===this.metrics.timesCited?"1 citation on Dimensions.":this.textContent+" total citations on Dimensions."}}]),e}();n.a=l},function(e,n){e.exports='<div class="__dimensions_Badge_Image">\n <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 600 600">\n <defs>\n <filter id="{{ID}}-blur" color-interpolation-filters="sRGB">\n <feGaussianBlur 
in="SourceGraphic" stdDeviation="40" />\n </filter>\n <mask id=\'{{ID}}-shape-mask\'>\n <path fill=\'white\' d="M546.5,287.3l-111.8-194c-1.8-3-4.1-5.6-6.7-7.7c-2.2-2.7-5.4-4.3-8.9-4.5c-2.3-0.7-4.7-1-7-1\n L188.2,80c-9.3,0-18,5-22.6,13.1l-112,193.8c-4.7,8.1-4.7,18,0,26.1l111.9,193.9c4.7,8.1,13.3,13,22.6,13.1l223.9,0.1\n c9.3,0,17.9-4.8,22.6-12.8l112-193.7l0,0C551.1,305.1,551.2,295.4,546.5,287.3z M516.6,287.4l-41.5,0.1l-26.7-118.2L516.6,287.4z\n M337.2,106.2l-114.5,36.3l-20.3-36.4L337.2,106.2z M180.7,119l20.1,36.2L112,237.7L180.7,119z M181,481.8l-69.3-120l89.2,84\n L181,481.8z M203.3,493.8l19.6-35.6l114.6,35.7L203.3,493.8z M408.1,489.3l-152.7-47.5l-32.3-10.6l-25.3-23.2L82.6,299.6\n l114.6-106.9l25-23.1v-0.2l0.7-0.2l32.4-10.7l153.6-48.7l35.5,156.6l7.5,33.4l-7.6,33.5L408.1,489.3z M447.1,432.9l27.9-120.3\n l41.7-0.1L447.1,432.9z"/>\n </mask>\n <g id=\'{{ID}}-gradient\' filter=\'url(#{{ID}}-blur)\'>\n <g>\n <path fill=\'#006EB6\' d="M0,0l300,300v-424.4C182.9-124.4,76.8-76.9,0,0z"/>\n <path fill=\'#28348A\' d="M300-124.4V300L600,0C523.2-76.9,417.1-124.4,300-124.4z"/>\n <path fill=\'#E20613\' d="M600,600c76.8-76.8,124.3-182.9,124.3-300H300L600,600z"/>\n <path fill=\'#DA0078\' d="M300,300h424.4c0-117.1-47.5-223.2-124.3-300L300,300z"/>\n <path fill=\'#5BC4F1\' d="M-124.4,300H300L0,0C-76.9,76.8-124.4,182.9-124.4,300z"/>\n <path fill=\'#A1C517\' d="M0,600l300-300h-424.4C-124.4,417.1-76.9,523.2,0,600z"/>\n <path fill=\'#F08800\' d="M300,724.4c117.1,0,223.2-47.5,300-124.3L300,300V724.4z"/>\n <path fill=\'#FFCB00\' d="M300,724.4V300L0,600C76.8,676.9,182.9,724.4,300,724.4z"/>\n </g>\n </g>\n </defs>\n </svg>\n\n <div class=\'__db_background\'></div>\n\n <div class="__db_shape">\n <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 600 600" xmlns:xlink="http://www.w3.org/1999/xlink">\n <use mask=\'url(#{{ID}}-shape-mask)\' xlink:href=\'#{{ID}}-gradient\'/>\n </svg>\n </div>\n\n <div class="__db_score __db_score_{{SIZE}}">{{SCORE}}</div>\n <div 
class="__db_label">{{LABEL}}</div>\n\n <img src="{{BADGE_SERVER_URL}}/badge?count={{SCORE}}" class="__dimensions_png" alt="{{ALT_SCORE}}"/>\n</div>\n'},function(e,n){e.exports='<div class="__dimensions_Badge_Image">\n <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 440 90" xmlns:xlink="http://www.w3.org/1999/xlink">\n <defs>\n <filter id="{{ID}}-blur" color-interpolation-filters="sRGB">\n <feGaussianBlur in="SourceGraphic" stdDeviation="20" />\n </filter>\n\n <filter filterUnits="objectBoundingBox" x=\'-1\' y=\'-1\' width=\'3\' height=\'3\' id="{{ID}}-glow" color-interpolation-filters="sRGB">\n <feGaussianBlur in="SourceAlpha" stdDeviation="4"/> \n <feColorMatrix type="matrix" values="-1 0 0 0 1 \n 0 -1 0 0 1 \n 0 0 -1 0 1\n 0 0 0 1 0"/>\n <feMerge> \n <feMergeNode/>\n <feMergeNode in="SourceGraphic"/> \n </feMerge>\n </filter>\n \n <mask id=\'{{ID}}-shape-mask\'>\n <path fill=\'white\' d="M89.8,83.9l14-50.5c0.6-2.1,0.1-4.4-1.4-6L67.7-11.7c-0.5-0.6-1.2-1.1-1.9-1.4\n c-0.3-0.2-0.6-0.4-0.9-0.5c-0.4-0.1-0.9-0.2-1.3-0.1c-0.5,0-1.1,0-1.6,0.1L13.2-2.3c-2,0.5-3.6,2.1-4.2,4.1L-5,52.3\n c-0.6,2.1,0,4.4,1.4,6l34.8,39.2c1.4,1.6,3.6,2.4,5.6,1.9l48.8-11.3C87.6,87.6,89.2,86,89.8,83.9 M39.7,77.1L25.6,16.6l0.1-0.1\n l56.8,18.6L39.7,77.1z M32.9,73L2,53.8l19.3-30.5L32.9,73z M47.3,77.9l35.4-34.6l0.5,37.9L47.3,77.9z M32.1,12.5L63-6.6l16.1,34.4\n L32.1,12.5z M24.1,10.4L17.8,3l29.4-6.8L24.1,10.4z M20,14.4L5.1,38l8.6-30.9L20,14.4z M35.6,81.6l-2.4,9.3L11.7,66.7L35.6,81.6z\n M41.1,83.4l26.9,2.4l-29.3,6.8L41.1,83.4z M88.6,66.2L88.2,37l9.1-2.1L88.6,66.2z M96,29l-9,2.1l-12.2-26L96,29z"/>\n </mask>\n\n <g id=\'{{ID}}-gradient\' filter=\'url(#{{ID}}-blur)\'>\n <g>\n <path fill=\'#006EB6\' d="M-20.7-4l70.1,46.6L29.9-41.8C7.9-36.7-9.9-22.7-20.7-4z"/>\n <path fill=\'#28348A\' d="M29.9-41.8l19.6,84.5l42.5-72.7C73.9-42.1,51.8-46.9,29.9-41.8z"/>\n <path fill=\'#E20613\' d="M119.6,89.3c10.9-18.6,14.9-41.8,9.5-65.1L49.4,42.6L119.6,89.3z"/>\n <path fill=\'#DA0078\' 
d="M49.4,42.6l79.7-18.5c-5.4-23.3-19.2-42.3-37.2-54.3L49.4,42.6z"/>\n <path fill=\'#5BC4F1\' d="M-30.2,61.1l79.7-18.5L-20.7-4C-31.6,14.6-35.6,37.8-30.2,61.1z"/>\n <path fill=\'#A1C517\' d="M6.9,115.4l42.5-72.7l-79.7,18.5C-24.8,84.4-11,103.4,6.9,115.4z"/>\n <path fill=\'#F08800\' d="M69,127.1c22-5.1,39.7-19.2,50.6-37.8L49.4,42.6L69,127.1z"/>\n <path fill=\'#FFCB00\' d="M69,127.1L49.4,42.6L6.9,115.4C24.9,127.3,47,132.2,69,127.1z"/>\n </g>\n </g>\n\n <linearGradient id=\'{{ID}}-background-gradient\' gradientUnits="userSpaceOnUse" x1="158.5316" y1="30.6667" x2="158.5316" y2="120.6667">\n <stop offset="0" style="stop-color:#FFFFFF"/>\n <stop offset="1" style="stop-color:#C8C8C8"/>\n </linearGradient>\n\n <mask id=\'{{ID}}-background-mask\'>\n <rect x="0" y="0" width="264" height="88" fill=\'#fff\'/>\n </mask>\n </defs>\n\n <rect x="0" y="0" width="264" height="88" fill=\'#fff\'/>\n <rect class=\'__db_background\' x="0" y="0" width="264" height="88" fill=\'url(#{{ID}}-background-gradient)\'/>\n\n <g mask=\'url(#{{ID}}-background-mask)\'>\n <use class=\'__db_shape\' mask=\'url(#{{ID}}-shape-mask)\' xlink:href=\'#{{ID}}-gradient\' opacity=\'0.75\'/>\n </g>\n\n <g filter=\'url(#{{ID}}-glow)\'>\n <g filter=\'url(#{{ID}}-glow)\'>\n <g filter=\'url(#{{ID}}-glow)\'>\n <path d="M58.5,47.4c-0.3,3.7-1.7,6.6-4.1,8.7s-5.5,3.1-9.5,3.1c-2.8,0-5.2-0.7-7.3-2c-2.1-1.3-3.7-3.2-4.9-5.6 c-1.1-2.4-1.7-5.2-1.8-8.4V40c0-3.3,0.6-6.1,1.7-8.6c1.2-2.5,2.8-4.4,5-5.8s4.7-2,7.5-2c3.8,0,6.9,1,9.2,3.1c2.3,2.1,3.7,5,4.1,8.8 h-6c-0.3-2.5-1-4.3-2.2-5.4c-1.2-1.1-2.9-1.7-5.1-1.7c-2.6,0-4.6,1-6,2.9S37.1,36,37,39.7v3.1c0,3.7,0.7,6.6,2,8.6 c1.3,2,3.3,3,5.9,3c2.4,0,4.1-0.5,5.3-1.6c1.2-1.1,1.9-2.8,2.3-5.3H58.5z"/>\n <path d="M63.4,26.2c0-0.9,0.3-1.6,0.8-2.2c0.6-0.6,1.4-0.9,2.4-0.9c1,0,1.9,0.3,2.4,0.9s0.9,1.3,0.9,2.2 c0,0.9-0.3,1.6-0.9,2.2c-0.6,0.6-1.4,0.9-2.4,0.9c-1,0-1.9-0.3-2.4-0.9C63.6,27.8,63.4,27.1,63.4,26.2z M69.5,58.7h-5.8V32.9h5.8 V58.7z"/>\n <path 
d="M83,26.7v6.3h4.5v4.3H83v14.4c0,1,0.2,1.7,0.6,2.1s1.1,0.7,2.1,0.7c0.7,0,1.3-0.1,2-0.2v4.5 c-1.3,0.4-2.6,0.5-3.8,0.5c-4.4,0-6.7-2.5-6.7-7.4V37.2H73v-4.3h4.2v-6.3H83z"/>\n <path d="M107.4,58.7c-0.3-0.5-0.5-1.3-0.7-2.4c-1.8,1.9-4.1,2.9-6.8,2.9c-2.6,0-4.7-0.7-6.3-2.2 c-1.6-1.5-2.5-3.3-2.5-5.5c0-2.7,1-4.9,3.1-6.3c2-1.5,5-2.2,8.7-2.2h3.5v-1.7c0-1.3-0.4-2.4-1.1-3.2c-0.7-0.8-1.9-1.2-3.4-1.2 c-1.3,0-2.4,0.3-3.2,1c-0.8,0.7-1.3,1.5-1.3,2.5h-5.8c0-1.4,0.5-2.7,1.4-4c0.9-1.2,2.2-2.2,3.8-2.9s3.4-1,5.4-1 c3,0,5.4,0.8,7.2,2.3s2.7,3.6,2.8,6.4v11.6c0,2.3,0.3,4.2,1,5.5v0.4H107.4z M101,54.5c1.1,0,2.2-0.3,3.2-0.8c1-0.6,1.8-1.3,2.3-2.2 v-4.9h-3.1c-2.1,0-3.8,0.4-4.8,1.1c-1.1,0.7-1.6,1.8-1.6,3.2c0,1.1,0.4,2,1.1,2.7S99.8,54.5,101,54.5z"/>\n <path d="M125.6,26.7v6.3h4.5v4.3h-4.5v14.4c0,1,0.2,1.7,0.6,2.1s1.1,0.7,2.1,0.7c0.7,0,1.3-0.1,2-0.2v4.5 c-1.3,0.4-2.6,0.5-3.8,0.5c-4.4,0-6.7-2.5-6.7-7.4V37.2h-4.2v-4.3h4.2v-6.3H125.6z"/>\n <path d="M134.6,26.2c0-0.9,0.3-1.6,0.8-2.2c0.6-0.6,1.4-0.9,2.4-0.9c1,0,1.9,0.3,2.4,0.9s0.9,1.3,0.9,2.2 c0,0.9-0.3,1.6-0.9,2.2c-0.6,0.6-1.4,0.9-2.4,0.9c-1,0-1.9-0.3-2.4-0.9C134.9,27.8,134.6,27.1,134.6,26.2z M140.7,58.7H135V32.9 h5.8V58.7z"/>\n <path d="M146,45.6c0-2.5,0.5-4.8,1.5-6.8s2.4-3.6,4.2-4.7c1.8-1.1,3.9-1.6,6.2-1.6c3.5,0,6.3,1.1,8.5,3.4 c2.2,2.2,3.3,5.2,3.5,8.9l0,1.4c0,2.5-0.5,4.8-1.5,6.8c-1,2-2.4,3.5-4.2,4.6s-3.9,1.6-6.3,1.6c-3.6,0-6.5-1.2-8.7-3.6 s-3.3-5.6-3.3-9.7V45.6z M151.7,46.1c0,2.6,0.5,4.7,1.6,6.2c1.1,1.5,2.6,2.2,4.6,2.2s3.5-0.8,4.6-2.3c1.1-1.5,1.6-3.8,1.6-6.7 c0-2.6-0.6-4.7-1.7-6.2c-1.1-1.5-2.6-2.3-4.6-2.3c-1.9,0-3.4,0.7-4.5,2.2C152.3,40.8,151.7,43.1,151.7,46.1z"/>\n <path d="M180.2,32.9l0.2,3c1.9-2.3,4.4-3.5,7.5-3.5c5.4,0,8.1,3.1,8.2,9.2v17h-5.8V42c0-1.6-0.4-2.8-1.1-3.6 c-0.7-0.8-1.9-1.2-3.5-1.2c-2.3,0-4.1,1.1-5.2,3.2v18.3h-5.8V32.9H180.2z"/>\n <path d="M216.3,51.7c0-1-0.4-1.8-1.3-2.4c-0.8-0.5-2.3-1-4.2-1.4c-2-0.4-3.6-0.9-4.9-1.6c-2.9-1.4-4.3-3.4-4.3-6.1 
c0-2.2,0.9-4.1,2.8-5.6c1.9-1.5,4.3-2.2,7.1-2.2c3.1,0,5.6,0.8,7.5,2.3c1.9,1.5,2.8,3.5,2.8,5.9H216c0-1.1-0.4-2-1.2-2.8 s-1.9-1.1-3.3-1.1c-1.3,0-2.3,0.3-3.1,0.9c-0.8,0.6-1.2,1.4-1.2,2.4c0,0.9,0.4,1.6,1.1,2.1c0.7,0.5,2.3,1,4.5,1.5 c2.3,0.5,4,1.1,5.3,1.8c1.3,0.7,2.3,1.5,2.9,2.5c0.6,1,0.9,2.1,0.9,3.5c0,2.3-1,4.2-2.9,5.6c-1.9,1.4-4.4,2.2-7.5,2.2 c-2.1,0-4-0.4-5.6-1.1c-1.7-0.8-2.9-1.8-3.9-3.1c-0.9-1.3-1.4-2.8-1.4-4.3h5.6c0.1,1.4,0.6,2.4,1.5,3.2c1,0.7,2.2,1.1,3.8,1.1 c1.5,0,2.7-0.3,3.5-0.9C215.9,53.4,216.3,52.6,216.3,51.7z"/>\n </g>\n </g>\n </g>\n\n <polygon fill=\'#fff\' points="286.8,87 286.8,56.3 272,41.9 286.8,26.6 286.8,0 438.8,0 438.8,87 "/>\n <path fill=\'#969696\' d="M261,3v82H3V3H261 M264,0H0v88h264V0L264,0z"/>\n <path fill=\'#969696\' d="M437,3v82H288V57.7v-1.3l-0.9-0.9L273.8,43l13.3-13.5l0.9-0.9v-1.2V3H437 M440,0H285v27.4 L269.5,43L285,57.7V88h155V0L440,0z"/>\n </svg>\n\n <div class="__db_score __db_score_{{SIZE}}">{{SCORE}}</div>\n <img src="{{BADGE_SERVER_URL}}/badge?style=rectangle&count={{SCORE}}" class="__dimensions_png" alt="{{ALT_SCORE}}" />\n</div>\n'},function(e,n,t){"use strict";t.d(n,"b",function(){return i}),t.d(n,"a",function(){return s});var i=function(e){if("string"==typeof e)return e;if((e=e||0)<1e3)return""+e;if(e<1e4){return String(Math.round(e/100)/10)+"k"}return String(Math.round(e/1e3))+"k"},s=function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:10;return null==e||void 0===e?"n/a":e<n?String(Math.round(100*e)/100):String(Math.floor(e))}},function(e,n,t){"use strict";function i(e,n){if(!(e instanceof n))throw new TypeError("Cannot call a class as a function")}var s=t(7),r=function(){function e(e,n){for(var t=0;t<n.length;t++){var i=n[t];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}return function(n,t,i){return 
t&&e(n.prototype,t),i&&e(n,i),n}}(),o=["never","always","hover-auto","hover-top","hover-right","hover-bottom","hover-left"],a=["small_circle","medium_circle","large_circle","small_rectangle","large_rectangle"],c=function(){function e(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],t=arguments[1],s=arguments[2],r=arguments[3];i(this,e),this.detailPageHost=t,this.dimensionsMetricsApi=s,this.badgeServerURL=r,this.legendDisplay="hover-auto",this.badgeType="medium_circle";for(var c=0;c<n.length;c++){var d=n[c];switch(d.name){case"data-doi":this.doi=d.value;break;case"data-id":this.id=d.value;break;case"data-pmid":this.pmid=d.value;break;case"data-hide-zero-citations":this.hideWhenZeroCitations="false"!==d.value.toLowerCase();break;case"data-legend":o.indexOf(d.value)>=0&&(this.legendDisplay=d.value);break;case"data-style":a.indexOf(d.value)>=0&&(this.badgeType=d.value)}}}return r(e,[{key:"hasAnyIdentifiers",get:function(){return this.doi||this.id||this.pmid}},{key:"preferredIdentifier",get:function(){return this.id?new s.b(this.id):this.doi?new s.a(this.doi):this.pmid?new s.c(this.pmid):void 0}},{key:"metricsURL",get:function(){var e=this.preferredIdentifier;return this.dimensionsMetricsApi+"/"+e.type+"/"+e.value}},{key:"detailPageURL",get:function(){var e=this.preferredIdentifier,n=this.detailPageHost+"/details/"+e.type+"/"+e.value;return window.location.host&&(n+="?domain="+window.location.protocol+"//"+window.location.host),n}}]),e}();n.a=c},function(e,n,t){"use strict";function i(e,n){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!n||"object"!=typeof n&&"function"!=typeof n?e:n}function s(e,n){if("function"!=typeof n&&null!==n)throw new TypeError("Super expression must either be null or a function, not "+typeof n);e.prototype=Object.create(n&&n.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),n&&(Object.setPrototypeOf?Object.setPrototypeOf(e,n):e.__proto__=n)}function 
r(e,n){if(!(e instanceof n))throw new TypeError("Cannot call a class as a function")}t.d(n,"a",function(){return a}),t.d(n,"b",function(){return c}),t.d(n,"c",function(){return d});var o=function e(n){r(this,e),this.value=n},a=function(e){function n(){var e,t,s,o;r(this,n);for(var a=arguments.length,c=Array(a),d=0;d<a;d++)c[d]=arguments[d];return t=s=i(this,(e=n.__proto__||Object.getPrototypeOf(n)).call.apply(e,[this].concat(c))),Object.defineProperty(s,"type",{enumerable:!0,writable:!0,value:"doi"}),o=t,i(s,o)}return s(n,e),n}(o),c=function(e){function n(){var e,t,s,o;r(this,n);for(var a=arguments.length,c=Array(a),d=0;d<a;d++)c[d]=arguments[d];return t=s=i(this,(e=n.__proto__||Object.getPrototypeOf(n)).call.apply(e,[this].concat(c))),Object.defineProperty(s,"type",{enumerable:!0,writable:!0,value:"id"}),o=t,i(s,o)}return s(n,e),n}(o),d=function(e){function n(){var e,t,s,o;r(this,n);for(var a=arguments.length,c=Array(a),d=0;d<a;d++)c[d]=arguments[d];return t=s=i(this,(e=n.__proto__||Object.getPrototypeOf(n)).call.apply(e,[this].concat(c))),Object.defineProperty(s,"type",{enumerable:!0,writable:!0,value:"pmid"}),o=t,i(s,o)}return s(n,e),n}(o)},function(e,n,t){"use strict";function i(e,n){if(!(e instanceof n))throw new TypeError("Cannot call a class as a function")}var s=t(9),r=t.n(s),o=function(){function e(e,n){for(var t=0;t<n.length;t++){var i=n[t];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(e,i.key,i)}}return function(n,t,i){return t&&e(n.prototype,t),i&&e(n,i),n}}(),a=function(){function e(n){i(this,e),this.url=n,this.exists=void 0,this.timesCited=null,this.recentCitations=null,this.highlyCited1=null,this.highlyCited5=null,this.highlyCited10=null,this.rcr=null,this.fcr=null}return o(e,[{key:"fetch",value:function(e){var n=this;r.a.get(this.url,{useXDR:!0},function(t,i,s){if(i.statusCode<200||i.statusCode>=300)n.exists=!1;else{var 
r=JSON.parse(s);n.exists=!0,n.timesCited=r.times_cited,n.recentCitations=r.recent_citations,n.highlyCited1=r.highly_cited_1,n.highlyCited5=r.highly_cited_5,n.highlyCited10=r.highly_cited_10,n.rcr=r.relative_citation_ratio,n.fcr=r.field_citation_ratio}e(n)})}}]),e}();n.a=a},function(e,n,t){"use strict";function i(e){for(var n in e)if(e.hasOwnProperty(n))return!1;return!0}function s(e,n,t){var i=e;return l(n)?(t=n,"string"==typeof e&&(i={uri:e})):i=_(n,{uri:e}),i.callback=t,i}function r(e,n,t){return n=s(e,n,t),o(n)}function o(e){function n(){4===l.readyState&&setTimeout(o,0)}function t(){var e=void 0;if(e=l.response?l.response:l.responseText||a(l),y)try{e=JSON.parse(e)}catch(e){}return e}function s(e){return clearTimeout(f),e instanceof Error||(e=new Error(""+(e||"Unknown XMLHttpRequest Error"))),e.statusCode=0,d(e,w)}function o(){if(!h){var n;clearTimeout(f),n=e.useXDR&&void 0===l.status?200:1223===l.status?204:l.status;var i=w,s=null;return 0!==n?(i={body:t(),statusCode:n,method:g,headers:{},url:p,rawRequest:l},l.getAllResponseHeaders&&(i.headers=u(l.getAllResponseHeaders()))):s=new Error("Internal XMLHttpRequest Error"),d(s,i,i.body)}}if(void 0===e.callback)throw new Error("callback argument missing");var c=!1,d=function(n,t,i){c||(c=!0,e.callback(n,t,i))},l=e.xhr||null;l||(l=e.cors||e.useXDR?new r.XDomainRequest:new r.XMLHttpRequest);var _,h,f,p=l.url=e.uri||e.url,g=l.method=e.method||"GET",m=e.body||e.data,v=l.headers=e.headers||{},b=!!e.sync,y=!1,w={body:void 0,headers:{},statusCode:0,method:g,url:p,rawRequest:l};if("json"in 
e&&!1!==e.json&&(y=!0,v.accept||v.Accept||(v.Accept="application/json"),"GET"!==g&&"HEAD"!==g&&(v["content-type"]||v["Content-Type"]||(v["Content-Type"]="application/json"),m=JSON.stringify(!0===e.json?m:e.json))),l.onreadystatechange=n,l.onload=o,l.onerror=s,l.onprogress=function(){},l.onabort=function(){h=!0},l.ontimeout=s,l.open(g,p,!b,e.username,e.password),b||(l.withCredentials=!!e.withCredentials),!b&&e.timeout>0&&(f=setTimeout(function(){if(!h){h=!0,l.abort("timeout");var e=new Error("XMLHttpRequest timeout");e.code="ETIMEDOUT",s(e)}},e.timeout)),l.setRequestHeader)for(_ in v)v.hasOwnProperty(_)&&l.setRequestHeader(_,v[_]);else if(e.headers&&!i(e.headers))throw new Error("Headers cannot be set on an XDomainRequest object");return"responseType"in e&&(l.responseType=e.responseType),"beforeSend"in e&&"function"==typeof e.beforeSend&&e.beforeSend(l),l.send(m||null),l}function a(e){try{if("document"===e.responseType)return e.responseXML;var n=e.responseXML&&"parsererror"===e.responseXML.documentElement.nodeName;if(""===e.responseType&&!n)return e.responseXML}catch(e){}return null}function c(){}var d=t(10),l=t(12),u=t(13),_=t(14);e.exports=r,e.exports.default=r,r.XMLHttpRequest=d.XMLHttpRequest||c,r.XDomainRequest="withCredentials"in new r.XMLHttpRequest?r.XMLHttpRequest:d.XDomainRequest,function(e,n){for(var t=0;t<e.length;t++)n(e[t])}(["get","put","post","patch","head","delete"],function(e){r["delete"===e?"del":e]=function(n,t,i){return t=s(n,t,i),t.method=e.toUpperCase(),o(t)}})},function(e,n,t){(function(n){var t;t="undefined"!=typeof window?window:void 0!==n?n:"undefined"!=typeof self?self:{},e.exports=t}).call(n,t(11))},function(e,n){var t;t=function(){return this}();try{t=t||Function("return this")()||(0,eval)("this")}catch(e){"object"==typeof window&&(t=window)}e.exports=t},function(e,n){function t(e){var n=i.call(e);return"[object Function]"===n||"function"==typeof e&&"[object RegExp]"!==n||"undefined"!=typeof 
window&&(e===window.setTimeout||e===window.alert||e===window.confirm||e===window.prompt)}e.exports=t;var i=Object.prototype.toString},function(e,n){var t=function(e){return e.replace(/^\s+|\s+$/g,"")},i=function(e){return"[object Array]"===Object.prototype.toString.call(e)};e.exports=function(e){if(!e)return{};for(var n={},s=t(e).split("\n"),r=0;r<s.length;r++){var o=s[r],a=o.indexOf(":"),c=t(o.slice(0,a)).toLowerCase(),d=t(o.slice(a+1));void 0===n[c]?n[c]=d:i(n[c])?n[c].push(d):n[c]=[n[c],d]}return n}},function(e,n){function t(){for(var e={},n=0;n<arguments.length;n++){var t=arguments[n];for(var s in t)i.call(t,s)&&(e[s]=t[s])}return e}e.exports=t;var i=Object.prototype.hasOwnProperty}]);</script>
<script type='text/javascript' charset="utf-8">!function(e,t){var d="createElement",n="getElementsByTagName",c="setAttribute",a=document.getElementById(e);return a&&a.parentNode&&a.parentNode.removeChild(a),a=document[d+"NS"]&&document.documentElement.namespaceURI,a=a?document[d+"NS"](a,"script"):document[d]("script"),a[c]("id",e),a[c]("src",t),void(document[n]("head")[0]||document[n]("body")[0]).appendChild(a)}("altmetric-embed-js","https://d1bxh8uas1mnw7.cloudfront.net/assets/altmetric_badges-dae2a658e53c442cd84f4390def938db5569f7911cfa1aa78d8970054ec2f48f.js");</script>
</head>
<h1>commonsense-papers</h1>
<p>Must-read papers on commonsense knowledge, and other resources and tutorials</p>
<p>Better viewed on the website:</p>
<p><a href="https://adapt-sjtu.github.io/commonsense-papers/">https://adapt-sjtu.github.io/commonsense-papers/</a></p>
<p>We aim to select the most representative and innovative papers in the research field of <strong>commonsense knowledge</strong>, and provide taxonomy/classification as well as statistics of these papers to give a quick overview of the field and help focused reading.</p>
<p>We've also added (influential) citation numbers according to <a href="https://www.semanticscholar.org/">Semantic Scholar</a>, <a href="http://api.altmetric.com/embeds.html">AltMetric Badge</a> (paper influence in social media) and <a href="https://badge.dimensions.ai/">Dimensions Badge</a> (paper citations) for <strong>papers that can be linked to an arXiv ID/DOI</strong>. Highly influential papers should now be easier to identify, though we still encourage readers to read other papers that might have been overlooked. Due to rendering limitations, the badges are only visible on our <a href="https://adapt-sjtu.github.io/commonsense-papers/">website</a>.</p>
<p><img src="images/badges.jpg" alt="badges" /></p>
<p>Contributed by <a href="https://adapt.seiee.sjtu.edu.cn/">ADAPTers</a> (major efforts by Zhiling Zhang (<a href="https://github.com/blmoistawinde">@blmoistawinde</a>), Siyu Ren, Hongru Huang, Zelin Zhou, Yanzhu Guo)</p>
<p>Our list may not be complete. We will keep adding papers and improving it. <a href="CONTRIBUTING.md">Contributions</a> are welcome!</p>
<h2 id="toc">Table of Contents</h2>
<ul>
<li><a href="#commonsense-papers">commonsense-papers</a></li>
<li><a href="#statistics">Statistics</a></li>
<li><a href="#tutorial-and-survey">Tutorial and Survey</a></li>
<li><a href="#resources-and-evaluation">Resources and Evaluation</a></li>
<li><a href="#commonsense-knowledge-basesmodels">Commonsense Knowledge Bases/Models</a></li>
<li><a href="#related-knowledge-bases">Related Knowledge Bases</a></li>
<li><a href="#datasets-and-benchmarks">Datasets and Benchmarks</a></li>
<li><a href="#evaluation-and-probing">Evaluation and Probing</a></li>
<li><a href="#knowledge-mining-knowledge-base-completion">Knowledge Mining (Knowledge Base Completion)</a></li>
<li><a href="#applications">Applications</a></li>
<li><a href="#natural-language-inference-nli-and-commonsense-reasoning-csr">natural language inference (NLI) and commonsense reasoning (CSR)</a></li>
<li><a href="#machine-reading-comprehension-mrc">Machine Reading Comprehension (MRC)</a></li>
<li><a href="#generation">Generation</a></li>
<li><a href="#question-answering-qa">Question Answering (QA)</a></li>
<li><a href="#vision-robotics-multimodal-grounding-and-speech">Vision, Robotics, Multimodal, Grounding and Speech</a></li>
</ul>
<h2>Statistics</h2>
<p>Total papers in this repo: <anchor id="cnt">64</anchor>.</p>
<p><strong>Research Keywords</strong></p>
<p>Non-stopword terms in paper titles, indicating the hot topics in this field.</p>
<p><anchor id="keyword"></p>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>count</th>
</tr>
</thead>
<tbody>
<tr>
<th>reasoning</th>
<td>13</td>
</tr>
<tr>
<th>graph</th>
<td>8</td>
</tr>
<tr>
<th>question</th>
<td>6</td>
</tr>
<tr>
<th>challenge</th>
<td>5</td>
</tr>
<tr>
<th>common</th>
<td>5</td>
</tr>
<tr>
<th>sense</th>
<td>5</td>
</tr>
<tr>
<th>model</th>
<td>5</td>
</tr>
<tr>
<th>pre</th>
<td>4</td>
</tr>
<tr>
<th>text</th>
<td>4</td>
</tr>
<tr>
<th>story</th>
<td>4</td>
</tr>
</tbody>
</table>
<p></anchor>
<br/></p>
<p><strong>Researchers</strong></p>
<p>Most active researchers in this field</p>
<p><anchor id="researcher"></p>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>count</th>
</tr>
</thead>
<tbody>
<tr>
<th><a href="https://www.semanticscholar.org/author/1699545">Yejin Choi</a></th>
<td>14</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/1857797">Chandra Bhagavatula</a></th>
<td>7</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/2691021">Antoine Bosselut</a></th>
<td>7</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/51583409">Bill Yuchen Lin</a></th>
<td>6</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/39227408">Ronan Le Bras</a></th>
<td>5</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/95882703">Yangqiu Song</a></th>
<td>5</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/48212577">Hongming Zhang</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/2516777">Hannah Rashkin</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/1796651">Kenny Q. Zhu</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/1384550891">Xiang Ren</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/2729164">Maarten Sap</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/144590225">Dan Roth</a></th>
<td>4</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/3103343">Vered Shwartz</a></th>
<td>3</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/8805254">Chaitanya Malaviya</a></th>
<td>3</td>
</tr>
<tr>
<th><a href="https://www.semanticscholar.org/author/150341221">Wangchunshu Zhou</a></th>
<td>2</td>
</tr>
</tbody>
</table>
<p></anchor></p>
<p><strong>Venues</strong></p>
<p><anchor id="venue"></p>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>count</th>
</tr>
</thead>
<tbody>
<tr>
<th>ACL</th>
<td>10</td>
</tr>
<tr>
<th>EMNLP</th>
<td>9</td>
</tr>
<tr>
<th>arxiv</th>
<td>8</td>
</tr>
<tr>
<th>AAAI</th>
<td>6</td>
</tr>
<tr>
<th>AKBC</th>
<td>3</td>
</tr>
</tbody>
</table>
<p></anchor></p>
<p><br/></p>
<h2>Tutorial and Survey</h2>
<p><strong>Recent Advances in Natural Language Inference: A Survey of Benchmarks, Resources, and Approaches</strong> arxiv 2019 <a href="https://arxiv.org/pdf/1904.01172">paper</a> (Citations: 27, 2 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1904.01172" style="float:left"></div> <br/></p>
<p><em>Shane Storks, Qiaozi Gao, Joyce Y. Chai</em></p>
<p><strong>T6: Commonsense Reasoning for Natural Language Processing.</strong> ACL 2020. <a href="https://slideslive.com/38931667/t6-commonsense-reasoning-for-natural-language-processing">slides and video</a></p>
<p><em>Antoine Bosselut, Dan Roth, Maarten Sap, Vered Shwartz, Yejin Choi</em></p>
<p><a href="#toc">back to table of contents</a></p>
<h2>Resources and Evaluation</h2>
<h3>Commonsense Knowledge Bases/Models</h3>
<p><br/></p>
<p><strong>CYC: Toward programs with common sense</strong> Communications of the ACM 1990 <a href="https://github.com/asanchez75/opencyc">data</a> <a href="https://www.cyc.com/">homepage</a></p>
<p><strong>Commonsense Causal Reasoning between Short Texts</strong> (CausalNet) KR 2016 <a href="https://dl.acm.org/doi/abs/10.5555/3032027.3032078">paper</a> <a href="https://adapt.seiee.sjtu.edu.cn/project_causal.html">homepage</a> <a href="https://github.com/cs-zyluo/CausalNet">code</a> (Citations: 35, 9 influential) </p>
<p><em>Zhiyi Luo, Yuchen Sha, Kenny Q. Zhu, Seung-won Hwang, Zhongyuan Wang</em></p>
<p><strong>ConceptNet 5.5: an open multilingual graph of general knowledge</strong> AAAI 2017. <a href="https://arxiv.org/pdf/1612.03975">paper</a> <a href="https://conceptnet.io/">homepage</a> <a href="https://github.com/commonsense/conceptnet5">repo</a> <a href="https://github.com/commonsense/conceptnet-numberbatch">embedding(ConceptNet Numberbatch) repo</a> (Citations: 653, 159 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1612.03975" style="float:left"></div> <br/></p>
<p><em>Robyn Speer, Joshua Chin, Catherine Havasi</em></p>
<p><strong>ATOMIC: An Atlas of Machine Commonsense for If-Then Reasoning</strong> AAAI 2019. <a href="https://arxiv.org/pdf/1811.00146.pdf">paper</a> <a href="https://homes.cs.washington.edu/~msap/atomic/">homepage</a> (Citations: 170, 41 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1811.00146" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1609/AAAI.V33I01.33013027" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Maarten Sap, Ronan LeBras, Emily Allaway, Chandra Bhagavatula, Nicholas Lourie, Hannah Rashkin, Brendan Roof, Noah A. Smith, Yejin Choi</em></p>
<p><strong>COMET: Commonsense Transformers for Automatic Knowledge Graph Construction</strong> ACL 2019 <a href="https://arxiv.org/abs/1906.05317">paper</a> <a href="https://mosaickg.apps.allenai.org/">homepage</a> <a href="https://github.com/atcbosselut/comet-commonsense">code</a> (Citations: 136, 27 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1906.05317" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/P19-1470" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Antoine Bosselut, Hannah Rashkin, Maarten Sap, Chaitanya Malaviya, Asli Celikyilmaz, Yejin Choi</em></p>
<p><strong>ASER: A Large-scale Eventuality Knowledge Graph.</strong> WWW 2020 <a href="https://arxiv.org/pdf/1905.00270">paper</a> <a href="https://hkust-knowcomp.github.io/ASER/">homepage</a> <a href="https://github.com/HKUST-KnowComp/ASER">code</a> (Citations: 17, 3 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1905.00270" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1145/3366423.3380107" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Hongming Zhang, Xin Liu, Haojie Pan, Yangqiu Song, Cane Wing-Ki Leung</em></p>
<p><strong>Commonsense Properties from Query Logs and Question Answering Forums</strong> (quasimodo) CIKM 2019 <a href="https://arxiv.org/pdf/1905.10989.pdf">paper</a> <a href="https://www.mpi-inf.mpg.de/departments/databases-and-information-systems/research/yago-naga/commonsense/quasimodo/">homepage</a> <a href="https://github.com/Aunsiels/CSK">code</a> (Citations: 13, 1 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1905.10989" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1145/3357384.3357955" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Julien Romero, Simon Razniewski, Koninika Pal, Jeff Z. Pan, Archit Sakhadeo, Gerhard Weikum</em></p>
<p><strong>TransOMCS: From Linguistic Graphs to Commonsense Knowledge</strong> IJCAI 2020 <a href="https://arxiv.org/pdf/2005.00206">paper</a> <a href="https://hkust-knowcomp.github.io/ASER/">homepage</a> <a href="https://github.com/HKUST-KnowComp/TransOMCS">code</a> (Citations: 10, 2 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.00206" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.24963/ijcai.2020/550" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Hongming Zhang, Daniel Khashabi, Yangqiu Song, Dan Roth</em></p>
<p><strong>Paragraph-Level Commonsense Transformers with Recurrent Memory</strong> (PARA-COMeT) arxiv 2020 <a href="https://arxiv.org/pdf/2010.01486">paper</a> (Citations: 0) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2010.01486" style="float:left"></div> <br/></p>
<p><em>Saadia Gabriel, Chandra Bhagavatula, Vered Shwartz, Ronan Le Bras, Maxwell Forbes, Yejin Choi</em></p>
<p><strong>COMET-ATOMIC 2020: On Symbolic and Neural Commonsense Knowledge Graphs</strong> arxiv 2020 <a href="https://arxiv.org/pdf/2010.05953.pdf">paper</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2010.05953" style="float:left"></div> <br/></p>
<p><em>Jena D. Hwang, Chandra Bhagavatula, Ronan Le Bras, Jeff Da, Keisuke Sakaguchi, Antoine Bosselut, Yejin Choi</em></p>
<p><strong>CommonGen: A Constrained Text Generation Challenge for Generative Commonsense Reasoning</strong> EMNLP 2020 Findings <a href="https://arxiv.org/pdf/1911.03705">paper</a> <a href="https://inklab.usc.edu/CommonGen/index.html">homepage</a> (Citations: 13, 3 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1911.03705" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.findings-emnlp.165" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Bill Yuchen Lin, Wangchunshu Zhou, Ming Shen, Pei Zhou, Chandra Bhagavatula, Yejin Choi, Xiang Ren</em></p>
<h3>Related Knowledge Bases</h3>
<p><br/></p>
<p><strong>WordNet: A Lexical Database for English</strong> Communications of the ACM Vol. 38, No. 11: 39-41. 1995. <a href="https://wordnet.princeton.edu/">homepage</a></p>
<p><em>George A. Miller</em></p>
<p><strong>Toward an Architecture for Never-Ending Language Learning</strong> (NELL) AAAI 2010 <a href="http://rtw.ml.cmu.edu/papers/carlson-aaai10.pdf">paper</a> <a href="http://rtw.ml.cmu.edu/rtw/">homepage</a></p>
<p><em>Andrew Carlson, Justin Betteridge, Bryan Kisiel, Burr Settles, Estevam R. Hruschka Jr., and Tom M. Mitchell</em></p>
<p><strong>Probase: a probabilistic taxonomy for text understanding</strong> SIGMOD 2012 <a href="https://dl.acm.org/doi/10.1145/2213836.2213891">paper</a> <a href="https://www.microsoft.com/en-us/research/project/probase/">homepage</a></p>
<p><em>Wentao Wu, Hongsong Li, Haixun Wang, Kenny Q. Zhu</em></p>
<p><strong>DBpedia – A Large-scale, Multilingual Knowledge Base Extracted from Wikipedia</strong>. Semantic Web 2012 <a href="http://svn.aksw.org/papers/2013/SWJ_DBpedia/public.pdf">paper</a> <a href="https://wiki.dbpedia.org/">homepage</a></p>
<p><em>Jens Lehmann, Robert Isele, Max Jakob, Anja Jentzsch, Dimitris Kontokostas, Pablo N. Mendes, Sebastian Hellmann, Mohamed Morsey, Patrick van Kleef, Sören Auer, Christian Bizer</em></p>
<h3>Datasets and Benchmarks</h3>
<p><br/></p>
<p><strong>A Corpus and Cloze Evaluation for Deeper Understanding of Commonsense Stories</strong> (Story Cloze Test) NAACL 2016 <a href="https://www.aclweb.org/anthology/N16-1098.pdf">paper</a> <a href="https://cs.rochester.edu/nlp/rocstories/">homepage</a> (Citations: 204, 47 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-doi="10.18653/v1/N16-1098" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/N16-1098" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Nasrin Mostafazadeh, Nathanael Chambers, Xiaodong He, Devi Parikh, Dhruv Batra, Lucy Vanderwende, Pushmeet Kohli, James Allen</em></p>
<p><strong>Modeling Naive Psychology of Characters in Simple Commonsense Stories</strong> ACL 2018 <a href="https://arxiv.org/pdf/1805.06533">paper</a> <a href="https://uwnlp.github.io/storycommonsense/">homepage</a> (Citations: 33, 7 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1805.06533" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/P18-1213" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Hannah Rashkin, Antoine Bosselut, Maarten Sap, Kevin Knight, Yejin Choi</em></p>
<p><strong>SemEval-2018 Task 11: Machine Comprehension Using Commonsense Knowledge</strong> SemEval 2018 <a href="https://www.aclweb.org/anthology/S18-1119.pdf">paper</a> (Citations: 86, 14 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-doi="10.18653/v1/S18-1119" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/S18-1119" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Simon Ostermann, Michael Roth, Ashutosh Modi, Stefan Thater, Manfred Pinkal</em></p>
<p><strong>Reasoning about Actions and State Changes by Injecting Commonsense Knowledge</strong> (ProPara) EMNLP 2018 <a href="https://arxiv.org/pdf/1808.10012">paper</a> <a href="https://github.com/allenai/propara">code</a> (Citations: 42, 7 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1808.10012" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/D18-1006" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Niket Tandon, Bhavana Dalvi Mishra, Joel Grus, Wen-tau Yih, Antoine Bosselut, Peter Clark</em></p>
<p><strong>CommonSenseqa: A question answering challenge targeting commonsense knowledge</strong> NAACL 2019 <a href="https://arxiv.org/abs/1811.00937">paper</a> <a href="https://www.tau-nlp.org/commonsenseqa">homepage</a> <a href="https://github.com/jonathanherzig/commonsenseqa">code</a> (Citations: 161, 53 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1811.00937" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/N19-1421" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Alon Talmor, Jonathan Herzig, Nicholas Lourie, Jonathan Berant</em></p>
<p><strong>From Recognition to Cognition: Visual Commonsense Reasoning</strong> CVPR 2019 <a href="https://arxiv.org/pdf/1811.10830">paper</a> <a href="https://github.com/rowanz/r2c/">code</a> <a href="http://visualcommonsense.com/">homepage</a> (Citations: 160, 35 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1811.10830" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1109/CVPR.2019.00688" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Rowan Zellers, Yonatan Bisk, Ali Farhadi, Yejin Choi</em></p>
<p><strong>oLMpics -- On what Language Model Pre-training Captures</strong> TACL 2019 <a href="https://arxiv.org/pdf/1912.13283.pdf">paper</a> <a href="https://github.com/alontalmor/oLMpics">code</a> <a href="https://github.com/alontalmor/oLMpics">homepage</a> (Citations: 61, 7 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1912.13283" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1162/tacl_a_00342" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Alon Talmor, Yanai Elazar, Yoav Goldberg, Jonathan Berant</em></p>
<p><strong>ABDUCTIVE COMMONSENSE REASONING</strong> (ART) ICLR 2020 <a href="https://arxiv.org/pdf/1908.05739">paper</a> <a href="http://abductivecommonsense.xyz/">homepage</a> (Citations: 64, 7 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1908.05739" style="float:left"></div> <br/></p>
<p><em>Chandra Bhagavatula, Ronan Le Bras, Chaitanya Malaviya, Keisuke Sakaguchi, Ari Holtzman, Hannah Rashkin, Doug Downey, Scott Wen-tau Yih, Yejin Choi</em></p>
<p><strong>PIQA: Reasoning about Physical Commonsense in Natural Language</strong> AAAI 2020 <a href="https://arxiv.org/pdf/1911.11641.pdf">paper</a> <a href="https://yonatanbisk.com/piqa/">homepage</a> (Citations: 47, 6 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1911.11641" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1609/AAAI.V34I05.6239" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Yonatan Bisk, Rowan Zellers, Ronan Le Bras, Jianfeng Gao, Yejin Choi</em></p>
<p><strong>On the Role of Conceptualization in Commonsense Knowledge Graph Construction</strong> arxiv 2020 <a href="https://arxiv.org/pdf/2003.03239">paper</a> <a href="https://github.com/mutiann/ccc">code</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2003.03239" style="float:left"></div> <br/></p>
<p><em>Mutian He, Yangqiu Song, Kun Xu, Dong Yu</em></p>
<p><strong>Reasoning about Goals, Steps, and Temporal Ordering with WikiHow</strong> EMNLP 2020 <a href="https://arxiv.org/pdf/2009.07690">paper</a> <a href="https://github.com/zharry29/wikihow-goal-step">code</a> (Citations: 0) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2009.07690" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.374" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Qing Lyu, Li Zhang, Chris Callison-Burch</em></p>
<p><strong>Birds have four legs?! NumerSense: Probing Numerical Commonsense Knowledge of Pre-trained Language Models</strong> EMNLP 2020 <a href="https://arxiv.org/pdf/2005.00683.pdf">paper</a> <a href="https://inklab.usc.edu/NumerSense/">homepage</a> (Citations: 4) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.00683" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.557" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Bill Yuchen Lin, Seyeon Lee, Rahul Khanna and Xiang Ren</em></p>
<p><strong>A Review of Winograd Schema Challenge Datasets and Approaches</strong> arxiv 2020 <a href="https://arxiv.org/abs/2004.13831">paper</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2004.13831" style="float:left"></div> <br/></p>
<p><em>Vid Kocijan, Thomas Lukasiewicz, Ernest Davis, Gary Marcus, Leora Morgenstern</em></p>
<p><strong>Integrating External Event Knowledge for Script Learning</strong> COLING 2020 <a href="https://www.aclweb.org/anthology/2020.coling-main.27.pdf">paper</a> (Citations: 0) </p>
<p><em>Shangwen Lv, Fuqing Zhu, Songlin Hu</em></p>
<p><strong>Enriching Large-Scale Eventuality Knowledge Graph with Entailment Relations</strong> AKBC 2020 <a href="https://arxiv.org/pdf/2006.11824">paper</a> <a href="https://github.com/HKUST-KnowComp/ASER-EEG">code</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2006.11824" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.24432/C56K5H" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Changlong Yu, Hongming Zhang, Yangqiu Song, Wilfred Ng, Lifeng Shang</em></p>
<p><a href="#toc">back to table of contents</a></p>
<h3>Evaluation and Probing</h3>
<p><strong>Reporting Bias and Knowledge Acquisition</strong> AKBC 2013 <a href="https://dl.acm.org/doi/10.1145/2509558.2509563">paper</a></p>
<p><em>Jonathan Gordon, Benjamin Van Durme</em></p>
<p><strong>Do Neural Language Representations Learn Physical Commonsense?</strong> CogSci 2019 <a href="https://arxiv.org/pdf/1908.02899.pdf">paper</a> <a href="https://github.com/mbforbes/physical-commonsense">code</a> (Citations: 21, 1 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1908.02899" style="float:left"></div> <br/></p>
<p><em>Maxwell Forbes, Ari Holtzman, Yejin Choi</em></p>
<p><strong>How Reasonable are Common-Sense Reasoning Tasks: A Case-Study on the Winograd Schema Challenge and SWAG</strong> EMNLP 2019 <a href="https://arxiv.org/pdf/1811.01778">paper</a> (Citations: 17, 5 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1811.01778" style="float:left"></div> <br/></p>
<p><em>Paul Trichelair, Ali Emami, Adam Trischler, Kaheer Suleman, Jackie Chi Kit Cheung</em></p>
<p><strong>Joint Reasoning for Multi-Faceted Commonsense Knowledge</strong> AKBC 2020 <a href="https://arxiv.org/pdf/2001.04170.pdf">paper</a> <a href="https://dice.mpi-inf.mpg.de/">homepage</a> (Citations: 6) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2001.04170" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.24432/C58G6G" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Yohan Chalier, Simon Razniewski, Gerhard Weikum</em></p>
<p><strong>Does BERT Solve Commonsense Task via Commonsense Knowledge?</strong> arxiv 2020 <a href="https://arxiv.org/pdf/2008.03945">paper</a> (Citations: 3) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2008.03945" style="float:left"></div> <br/></p>
<p><em>Leyang Cui, Sijie Cheng, Yu Wu, Yue Zhang</em></p>
<p><strong>WinoWhy: A Deep Diagnosis of Essential Commonsense Knowledge for Answering Winograd Schema Challenge</strong> ACL 2020 <a href="https://arxiv.org/pdf/2005.05763">paper</a> <a href="https://github.com/HKUST-KnowComp/WinoWhy">code</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.05763" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.acl-main.508" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Hongming Zhang, Xinran Zhao, Yangqiu Song</em></p>
<p><a href="#toc">back to table of contents</a></p>
<h2>Knowledge Mining (Knowledge Base Completion)</h2>
<p><strong>Automatic Extraction of Commonsense LocatedNear Knowledge</strong> ACL 2018 <a href="https://arxiv.org/pdf/1711.04204">paper</a> <a href="https://github.com/adapt-sjtu/commonsense-locatednear">code</a> (Citations: 1) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1711.04204" style="float:left"></div> <br/></p>
<p><em>Frank F. Xu, Bill Yuchen Lin, Kenny Q. Zhu</em></p>
<p><strong>Commonsense Knowledge Mining from Pretrained Models</strong> EMNLP 2019 <a href="https://arxiv.org/pdf/1909.00505">paper</a> (Citations: 50, 6 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1909.00505" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/D19-1109" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Joe Davison, Joshua Feldman, Alexander Rush</em></p>
<p><strong>Language Models as Knowledge Bases?</strong> EMNLP 2019 <a href="https://arxiv.org/pdf/1909.01066.pdf">paper</a> <a href="https://github.com/facebookresearch/LAMA">code</a> (Citations: 214, 37 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1909.01066" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/D19-1250" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Fabio Petroni, Tim Rocktäschel, Patrick Lewis, Anton Bakhtin, Yuxiang Wu, Alexander H. Miller, Sebastian Riedel</em></p>
<p><strong>Commonsense Knowledge Base Completion with Structural and Semantic Context</strong> AAAI 2020 <a href="https://arxiv.org/pdf/1910.02915">paper</a> <a href="https://github.com/allenai/commonsense-kg-completion">code</a> (Citations: 14, 3 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1910.02915" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1609/AAAI.V34I03.5684" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Chaitanya Malaviya, Chandra Bhagavatula, Antoine Bosselut, Yejin Choi</em></p>
<p><strong>Temporal Common Sense Acquisition with Minimal Supervision</strong> ACL 2020 <a href="https://arxiv.org/pdf/2005.04304.pdf">paper</a> (Citations: 11) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.04304" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.acl-main.678" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Ben Zhou, Qiang Ning, Daniel Khashabi, Dan Roth</em></p>
<p><strong>Learning Physical Common Sense as Knowledge Graph Completion via BERT Data Augmentation and Constrained Tucker Factorization</strong> EMNLP 2020 <a href="https://www.aclweb.org/anthology/2020.emnlp-main.266.pdf">paper</a> (Citations: 0) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-doi="10.18653/v1/2020.emnlp-main.266" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.266" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Zhenjie Zhao, Evangelos Papalexakis, Xiaojuan Ma</em></p>
<p><a href="#toc">back to table of contents</a></p>
<h2>Applications</h2>
<h3>natural language inference (NLI) and commonsense reasoning (CSR)</h3>
<p><br/></p>
<p><strong>Neural natural language inference models enhanced with external knowledge</strong> ACL 2018 <a href="https://arxiv.org/abs/1711.04289">paper</a> <a href="https://github.com/lukecq1231/kim">code</a> (Citations: 115, 21 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1711.04289" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/P18-1224" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Qian Chen, Xiaodan Zhu, Zhen-Hua Ling, Diana Inkpen, Si Wei</em></p>
<p><strong>Incorporating Structured Commonsense Knowledge in Story Completion</strong> AAAI 2019 <a href="https://arxiv.org/pdf/1811.00625">paper</a> (Citations: 17, 4 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1811.00625" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1609/aaai.v33i01.33016244" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Jiaao Chen, Jianshu Chen, Zhou Yu</em></p>
<p><strong>Contrastive Self-Supervised Learning for Commonsense Reasoning</strong> ACL 2020 <a href="https://arxiv.org/pdf/2005.00669.pdf">paper</a> <a href="https://github.com/SAP-samples/acl2020-commonsense/">code</a> (Citations: 5) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.00669" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.acl-main.671" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Tassilo Klein, Moin Nabi</em></p>
<p><strong>Do Language Embeddings Capture Scales?</strong> EMNLP Findings 2020 <a href="https://arxiv.org/pdf/2010.05345">paper</a> <a href="https://github.com/google-research-datasets/numbert">code</a> (Citations: 0) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2010.05345" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.findings-emnlp.439" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Xikun Zhang, Deepak Ramachandran, Ian Tenney, Yanai Elazar, Dan Roth</em></p>
<p><strong>Differentiable Open-Ended Commonsense Reasoning</strong> arxiv 2020 <a href="https://arxiv.org/pdf/2010.14439">paper</a> (Citations: 1) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2010.14439" style="float:left"></div> <br/></p>
<p><em>Bill Yuchen Lin, Haitian Sun, Bhuwan Dhingra, Manzil Zaheer, Xiang Ren, William W. Cohen</em></p>
<p><strong>Pre-training Text-to-Text Transformers for Concept-centric Common Sense</strong> arxiv 2020 <a href="https://arxiv.org/pdf/2011.07956">paper</a> <a href="https://anonymous.4open.science/r/6fdeed55-ec2c-4ffa-aee8-0cc3b7f5ade5/">code</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2011.07956" style="float:left"></div> <br/></p>
<p><em>Wangchunshu Zhou, Dong-Ho Lee, Ravi Kiran Selvam, Seyeon Lee, Bill Yuchen Lin, Xiang Ren</em></p>
<p><strong>Enhanced Story Representation by ConceptNet for Predicting Story Endings</strong> CIKM 2020 <a href="https://dl.acm.org/doi/abs/10.1145/3340531.3417466">paper</a> <a href="https://github.com/shanshan437421/Simplification">code</a></p>
<p><em>Shanshan Huang, Kenny Q. Zhu, Qianzi Liao, Libin Shen and Yinggong Zhao</em></p>
<h3>Machine Reading Comprehension (MRC)</h3>
<p><br/></p>
<p><strong>Enhancing Pre-Trained Language Representations with Rich Knowledge for Machine Reading Comprehension</strong> ACL 2019 <a href="https://www.aclweb.org/anthology/P19-1226.pdf">paper</a> <a href="https://github.com/PaddlePaddle/Research/tree/master/NLP/ACL2019-KTNET">code</a> (Citations: 37, 8 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-doi="10.18653/v1/P19-1226" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/P19-1226" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>An Yang, Quan Wang, Jing Liu, Kai Liu, Yajuan Lyu, Hua Wu, Qiaoqiao She, Sujian Li</em></p>
<h3>Generation</h3>
<p><br/></p>
<p><strong>Diverse and Informative Dialogue Generation with Context-Specific Commonsense Knowledge Awareness</strong> ACL 2020 <a href="https://www.aclweb.org/anthology/2020.acl-main.515.pdf">paper</a> <a href="https://github.com/pku-orangecat/ACL2020-ConKADI">code</a> (Citations: 7, 1 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-doi="10.18653/v1/2020.acl-main.515" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.acl-main.515" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Sixing Wu, Ying Li, Dawei Zhang, Yang Zhou, Zhonghai Wu</em></p>
<p><strong>Language Generation with Multi-Hop Reasoning on Commonsense Knowledge Graph</strong> EMNLP 2020 <a href="https://arxiv.org/pdf/2009.11692.pdf">paper</a> (Citations: 2) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2009.11692" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.54" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Haozhe Ji, Pei Ke, Shaohan Huang, Furu Wei, Xiaoyan Zhu, Minlie Huang</em></p>
<h3>Question Answering (QA)</h3>
<p><br/></p>
<p><strong>Dynamic Knowledge Graph Construction for Zero-shot Commonsense Question Answering</strong> arxiv 2019 <a href="https://arxiv.org/pdf/1911.03876">paper</a> (Citations: 1) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1911.03876" style="float:left"></div> <br/></p>
<p><em>Antoine Bosselut, Yejin Choi</em></p>
<p><strong>Adversarial Training for Commonsense Inference</strong> ACL 2020 RepL4NLP workshop <a href="https://arxiv.org/pdf/2005.08156">paper</a> (Citations: 3, 1 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.08156" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.repl4nlp-1.8" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Lis Pereira, Xiaodong Liu, Fei Cheng, Masayuki Asahara, Ichiro Kobayashi</em></p>
<p><strong>Unsupervised Commonsense Question Answering with Self-Talk</strong> EMNLP 2020 <a href="https://arxiv.org/pdf/2004.05483">paper</a> <a href="https://github.com/vered1986/self_talk">code</a> (Citations: 22, 1 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2004.05483" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.373" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Vered Shwartz, Peter West, Ronan Le Bras, Chandra Bhagavatula, Yejin Choi</em></p>
<p><strong>Scalable Multi-Hop Relational Reasoning for Knowledge-Aware Question Answering</strong> EMNLP 2020 <a href="https://arxiv.org/pdf/2005.00646">paper</a> <a href="https://github.com/INK-USC/MHGRN">code</a> (Citations: 9, 2 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2005.00646" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.18653/v1/2020.emnlp-main.99" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Yanlin Feng, Xinyue Chen, Bill Yuchen Lin, Peifeng Wang, Jun Yan, Xiang Ren</em></p>
<p><strong>Benchmarking Knowledge-Enhanced Commonsense Question Answering via Knowledge-to-Text Transformation</strong> AAAI 2021 <a href="https://arxiv.org/pdf/2101.00760">paper</a> (Citations: 0) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2101.00760" style="float:left"></div> <br/></p>
<p><em>Ning Bian, Xianpei Han, Bo Chen, Le Sun</em></p>
<h3>Vision, Robotics, Multimodal, Grounding and Speech</h3>
<p><br/></p>
<p><strong>Enabling Robots to Understand Incomplete Natural Language Instructions Using Commonsense Reasoning</strong> ICRA 2020 <a href="https://arxiv.org/pdf/1904.12907">paper</a> (Citations: 10) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="1904.12907" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1109/ICRA40945.2020.9197315" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Haonan Chen, Hao Tan, Alan Kuntz, Mohit Bansal, Ron Alterovitz</em></p>
<p><strong>Visual Commonsense R-CNN</strong> CVPR 2020 <a href="https://arxiv.org/pdf/2002.12204">paper</a> <a href="https://github.com/Wangt-CN/VC-R-CNN">code</a> (Citations: 14, 3 influential) <div data-badge-popover="right" data-badge-type="2" data-hide-no-mentions="true" class="altmetric-embed" data-arxiv-id="2002.12204" style="float:left"></div> <span class="__dimensions_badge_embed__" data-doi="10.1109/cvpr42600.2020.01077" data-style="small_rectangle" style="float:left"></span> <br/></p>
<p><em>Tan Wang, Jianqiang Huang, Hanwang Zhang, Qianru Sun</em></p>
<p><a href="#toc">back to table of contents</a></p>