From 02c37794a1a1f69625a9ebb7e32278c81c1c8e79 Mon Sep 17 00:00:00 2001
From: bonjour-npy <2366353971@qq.com>
Date: Tue, 31 Oct 2023 17:32:30 +0800
Subject: [PATCH] Deploy website - based on 8e4a2ecb009702dadf0ffd623f50b966f587efaf

---
 404.html | 2 +-
 assets/css/styles.52abc4d3.css | 1 +
 assets/css/styles.eb1ce4d6.css | 1 -
 "blog/PyTroch\345\237\272\347\241\200/index.html" | 2 +-
 blog/archive/index.html | 2 +-
 .../index.html" | 2 +-
 blog/index.html | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 "docs/Algorithms/STL\346\250\241\346\235\277/index.html" | 2 +-
 docs/Algorithms/intro/index.html | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 docs/Deep Learning/intro/index.html | 2 +-
 .../K-fold Cross-validation/index.html" | 2 +-
 .../Logistic Regression/index.html" | 2 +-
 .../PyTroch\345\237\272\347\241\200/index.html" | 2 +-
 .../index.html" | 2 +-
 .../\345\215\267\347\247\257\345\261\202/index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../\346\261\240\345\214\226\345\261\202/index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../Visdom\345\217\257\350\247\206\345\214\226/index.html" | 2 +-
 .../AlexNet/index.html" | 2 +-
 .../LeNet/index.html" | 2 +-
 .../Perceptron/index.html" | 2 +-
 .../Attention Is All You Need/index.html" | 2 +-
 .../Self-Attention/index.html" | 2 +-
 docs/Linux/intro/index.html | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 docs/Others/intro/index.html | 2 +-
 .../\345\221\212\347\244\272\346\240\217/index.html" | 2 +-
 .../index.html" | 2 +-
 "docs/\346\216\250\345\205\215/intro/index.html" | 2 +-
 .../index.html" | 2 +-
 .../\346\246\202\347\216\207\350\256\272/index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../intro/index.html" | 2 +-
 .../GeekOS project 0/index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../index.html" | 2 +-
 .../Transformer and self-attention/index.html" | 2 +-
 "docs/\351\270\243\350\260\242/intro/index.html" | 2 +-
 img/apple.svg | 1 +
 img/avengers.svg | 6 ------
 index.html | 2 +-
 markdown-page/index.html | 2 +-
 search-index.json | 2 +-
 search/index.html | 2 +-
 60 files changed, 58 insertions(+), 63 deletions(-)
 create mode 100644 assets/css/styles.52abc4d3.css
 delete mode 100644 assets/css/styles.eb1ce4d6.css
 create mode 100644 img/apple.svg
 delete mode 100644 img/avengers.svg

diff --git a/404.html b/404.html
index 10ec84a64..4399eeb6d 100644
--- a/404.html
+++ b/404.html
@@ -9,7 +9,7 @@
 - + 
diff --git a/assets/css/styles.52abc4d3.css b/assets/css/styles.52abc4d3.css
new file mode 100644
index 000000000..746598b7d
--- /dev/null
+++ b/assets/css/styles.52abc4d3.css
@@ -0,0 +1 @@
+.col,.container{padding:0 var(--ifm-spacing-horizontal);width:100%}.markdown>h2,.markdown>h3,.markdown>h4,.markdown>h5,.markdown>h6{margin-bottom:calc(var(--ifm-heading-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown li,body{word-wrap:break-word}body,ol ol,ol ul,ul ol,ul ul{margin:0}pre,table{overflow:auto}blockquote,pre{margin:0 0 var(--ifm-spacing-vertical)}.breadcrumbs__link,.button{transition-timing-function:var(--ifm-transition-timing-default)}.button,code{vertical-align:middle}.button--outline.button--active,.button--outline:active,.button--outline:hover,:root{--ifm-button-color:var(--ifm-font-color-base-inverse)}.menu__link:hover,a{transition:color var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.navbar--dark,:root{--ifm-navbar-link-hover-color:var(--ifm-color-primary)}.menu,.navbar-sidebar{overflow-x:hidden}:root,html[data-theme=dark]{--ifm-color-emphasis-500:var(--ifm-color-gray-500)}.toggleButton_gllP,html{-webkit-tap-highlight-color:transparent}*,.loadingRing_RJI3 div{box-sizing:border-box}.clean-list,.containsTaskList_mC6p,.details_lb9f>summary,.dropdown__menu,.menu__list{list-style:none}:root{--ifm-color-scheme:light;--ifm-dark-value:10%;--ifm-darker-value:15%;--ifm-darkest-value:30%;--ifm-light-value:15%;--ifm-lighter-value:30%;--ifm-lightest-value:50%;--ifm-contrast-background-value:90%;--ifm-contrast-foreground-value:70%;--ifm-contrast-background-dark-value:70%;--ifm-contrast-foreground-dark-value:90%;--ifm-color-primary:#3578e5;--ifm-color-secondary:#ebedf0;--ifm-color-success:#00a400;--ifm-color-info:#54c7ec;--ifm-color-warning:#ffba00;--ifm-color-danger:#fa383e;--ifm-color-primary-dark:#306cce;--ifm-color-primary-darker:#2d66c3;--ifm-color-primary-darkest:#2554a0;--ifm-color-primary-light:#538ce9;--ifm-color-primary-lighter:#72a1ed;--ifm-color-primary-lightest:#9abcf2;--ifm-color-primary-contrast-background:#ebf2fc;--ifm-color-primary-contrast-foreground:#102445;--ifm-color-secondary-dark:#d4d5d8;--ifm-color-secondary-darker:#c8c9cc;--ifm-color-secondary-darkest:#a4a6a8;--ifm-color-secondary-light:#eef0f2;--ifm-color-secondary-lighter:#f1f2f5;--ifm-color-secondary-lightest:#f5f6f8;--ifm-color-secondary-contrast-background:#fdfdfe;--ifm-color-secondary-contrast-foreground:#474748;--ifm-color-success-dark:#009400;--ifm-color-success-darker:#008b00;--ifm-color-success-darkest:#007300;--ifm-color-success-light:#26b226;--ifm-color-success-lighter:#4dbf4d;--ifm-color-success-lightest:#80d280;--ifm-color-success-contrast-background:#e6f6e6;--ifm-color-success-contrast-foreground:#003100;--ifm-color-info-dark:#4cb3d4;--ifm-color-info-darker:#47a9c9;--ifm-color-info-darkest:#3b8ba5;--ifm-color-info-light:#6ecfef;--ifm-color-info-lighter:#87d8f2;--ifm-color-info-lightest:#aae3f6;--ifm-color-info-contrast-background:#eef9fd;--ifm-color-info-contrast-foreground:#193c47;--ifm-color-warning-dark:#e6a700;--ifm-color-warning-darker:#d99e00;--ifm-color-warning-darkest:#b38200;--ifm-color-warning-light:#ffc426;--ifm-color-warning-lighter:#ffcf4d;--ifm-color-warning-lightest:#ffdd80;--ifm-color-warning-contrast-background:#fff8e6;--ifm-color-warning-contrast-foreground:#4d3800;--ifm-color-danger-dark:#e13238;--ifm-color-danger-darker:#d53035;--ifm-color-danger-darkest:#af272b;--ifm-color-danger-light:#fb565b;--ifm-color-danger-lighter:#fb7478;--ifm-color-danger-lightest:#fd9c9f;--ifm-color-danger-contrast-background:#ffebec;--ifm-color-danger-contrast-foreground:#4b1113;--ifm-color-white:#fff;--ifm-color-black:#000;--ifm-color-gray-0:var(--ifm-color-white);--ifm-color-gray-100:#f5f6f7;--ifm-color-gray-200:#ebedf0;--ifm-color-gray-300:#dadde1;--ifm-color-gray-400:#ccd0d5;--ifm-color-gray-500:#bec3c9;--ifm-color-gray-600:#8d949e;--ifm-color-gray-700:#606770;--ifm-color-gray-800:#444950;--ifm-color-gray-900:#1c1e21;--ifm-color-gray-1000:var(--ifm-color-black);--ifm-color-emphasis-0:var(--ifm-color-gray-0);--ifm-color-emphasis-100:var(--ifm-color-gray-100);--ifm-color-emphasis-200:var(--ifm-color-gray-200);--ifm-color-emphasis-300:var(--ifm-color-gray-300);--ifm-color-emphasis-400:var(--ifm-color-gray-400);--ifm-color-emphasis-600:var(--ifm-color-gray-600);--ifm-color-emphasis-700:var(--ifm-color-gray-700);--ifm-color-emphasis-800:var(--ifm-color-gray-800)
;--ifm-color-emphasis-900:var(--ifm-color-gray-900);--ifm-color-emphasis-1000:var(--ifm-color-gray-1000);--ifm-color-content:var(--ifm-color-emphasis-900);--ifm-color-content-inverse:var(--ifm-color-emphasis-0);--ifm-color-content-secondary:#525860;--ifm-background-color:#0000;--ifm-background-surface-color:var(--ifm-color-content-inverse);--ifm-global-border-width:1px;--ifm-global-radius:0.4rem;--ifm-hover-overlay:#0000000d;--ifm-font-color-base:var(--ifm-color-content);--ifm-font-color-base-inverse:var(--ifm-color-content-inverse);--ifm-font-color-secondary:var(--ifm-color-content-secondary);--ifm-font-family-base:system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,"Segoe UI",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol";--ifm-font-family-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--ifm-font-size-base:100%;--ifm-font-weight-light:300;--ifm-font-weight-normal:400;--ifm-font-weight-semibold:500;--ifm-font-weight-bold:700;--ifm-font-weight-base:var(--ifm-font-weight-normal);--ifm-line-height-base:1.65;--ifm-global-spacing:1rem;--ifm-spacing-vertical:var(--ifm-global-spacing);--ifm-spacing-horizontal:var(--ifm-global-spacing);--ifm-transition-fast:200ms;--ifm-transition-slow:400ms;--ifm-transition-timing-default:cubic-bezier(0.08,0.52,0.52,1);--ifm-global-shadow-lw:0 1px 2px 0 #0000001a;--ifm-global-shadow-md:0 5px 40px #0003;--ifm-global-shadow-tl:0 12px 28px 0 #0003,0 2px 4px 0 #0000001a;--ifm-z-index-dropdown:100;--ifm-z-index-fixed:200;--ifm-z-index-overlay:400;--ifm-container-width:1140px;--ifm-container-width-xl:1320px;--ifm-code-background:#f6f7f8;--ifm-code-border-radius:var(--ifm-global-radius);--ifm-code-font-size:90%;--ifm-code-padding-horizontal:0.1rem;--ifm-code-padding-vertical:0.1rem;--ifm-pre-background:var(--ifm-code-background);--ifm-pre-border-radius:var(--ifm-code-border-radius);--ifm-pre-color:inherit;--ifm-pre-line-height:1.45;--ifm-pre-padding:1rem;--ifm-heading-color:inherit;--ifm-heading-margin-top:0;--ifm-heading-margin-bottom:var(--ifm-spacing-vertical);--ifm-heading-font-family:var(--ifm-font-family-base);--ifm-heading-font-weight:var(--ifm-font-weight-bold);--ifm-heading-line-height:1.25;--ifm-h1-font-size:2rem;--ifm-h2-font-size:1.5rem;--ifm-h3-font-size:1.25rem;--ifm-h4-font-size:1rem;--ifm-h5-font-size:0.875rem;--ifm-h6-font-size:0.85rem;--ifm-image-alignment-padding:1.25rem;--ifm-leading-desktop:1.25;--ifm-leading:calc(var(--ifm-leading-desktop)*1rem);--ifm-list-left-padding:2rem;--ifm-list-margin:1rem;--ifm-list-item-margin:0.25rem;--ifm-list-paragraph-margin:1rem;--ifm-table-cell-padding:0.75rem;--ifm-table-background:#0000;--ifm-table-stripe-background:#00000008;--ifm-table-border-width:1px;--ifm-table-border-color:var(--ifm-color-emphasis-300);--ifm-table-head-background:inherit;--ifm-table-head-color:inherit;--ifm-table-head-font-weight:var(--ifm-font-weight-bold);--ifm-table-cell-color:inherit;--ifm-link-color:var(--ifm-color-primary);--ifm-link-decoration:none;--ifm-link-hover-color:var(--ifm-link-color);--ifm-link-hover-decoration:underline;--ifm-paragraph-margin-bottom:var(--ifm-leading);--ifm-blockquote-font-size:var(--ifm-font-size-base);--ifm-blockquote-border-left-width:2px;--ifm-blockquote-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-blockquote-padding-vertical:0;--ifm-blockquote-shadow:none;--ifm-blockquote-color:var(--ifm-color-emphasis-800);--ifm-blockquote-border-color:var(--ifm-color-emphasis-300);--ifm-hr-
background-color:var(--ifm-color-emphasis-500);--ifm-hr-height:1px;--ifm-hr-margin-vertical:1.5rem;--ifm-scrollbar-size:7px;--ifm-scrollbar-track-background-color:#f1f1f1;--ifm-scrollbar-thumb-background-color:silver;--ifm-scrollbar-thumb-hover-background-color:#a7a7a7;--ifm-alert-background-color:inherit;--ifm-alert-border-color:inherit;--ifm-alert-border-radius:var(--ifm-global-radius);--ifm-alert-border-width:0px;--ifm-alert-border-left-width:5px;--ifm-alert-color:var(--ifm-font-color-base);--ifm-alert-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-alert-padding-vertical:var(--ifm-spacing-vertical);--ifm-alert-shadow:var(--ifm-global-shadow-lw);--ifm-avatar-intro-margin:1rem;--ifm-avatar-intro-alignment:inherit;--ifm-avatar-photo-size:3rem;--ifm-badge-background-color:inherit;--ifm-badge-border-color:inherit;--ifm-badge-border-radius:var(--ifm-global-radius);--ifm-badge-border-width:var(--ifm-global-border-width);--ifm-badge-color:var(--ifm-color-white);--ifm-badge-padding-horizontal:calc(var(--ifm-spacing-horizontal)*0.5);--ifm-badge-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-breadcrumb-border-radius:1.5rem;--ifm-breadcrumb-spacing:0.5rem;--ifm-breadcrumb-color-active:var(--ifm-color-primary);--ifm-breadcrumb-item-background-active:var(--ifm-hover-overlay);--ifm-breadcrumb-padding-horizontal:0.8rem;--ifm-breadcrumb-padding-vertical:0.4rem;--ifm-breadcrumb-size-multiplier:1;--ifm-breadcrumb-separator:url('data:image/svg+xml;utf8,');--ifm-breadcrumb-separator-filter:none;--ifm-breadcrumb-separator-size:0.5rem;--ifm-breadcrumb-separator-size-multiplier:1.25;--ifm-button-background-color:inherit;--ifm-button-border-color:var(--ifm-button-background-color);--ifm-button-border-width:var(--ifm-global-border-width);--ifm-button-font-weight:var(--ifm-font-weight-bold);--ifm-button-padding-horizontal:1.5rem;--ifm-button-padding-vertical:0.375rem;--ifm-button-size-multiplier:1;--ifm-button-transition-duration:var(--ifm-transition-fast);--ifm-button-border-radius:calc(var(--ifm-global-radius)*var(--ifm-button-size-multiplier));--ifm-button-group-spacing:2px;--ifm-card-background-color:var(--ifm-background-surface-color);--ifm-card-border-radius:calc(var(--ifm-global-radius)*2);--ifm-card-horizontal-spacing:var(--ifm-global-spacing);--ifm-card-vertical-spacing:var(--ifm-global-spacing);--ifm-toc-border-color:var(--ifm-color-emphasis-300);--ifm-toc-link-color:var(--ifm-color-content-secondary);--ifm-toc-padding-vertical:0.5rem;--ifm-toc-padding-horizontal:0.5rem;--ifm-dropdown-background-color:var(--ifm-background-surface-color);--ifm-dropdown-font-weight:var(--ifm-font-weight-semibold);--ifm-dropdown-link-color:var(--ifm-font-color-base);--ifm-dropdown-hover-background-color:var(--ifm-hover-overlay);--ifm-footer-background-color:var(--ifm-color-emphasis-100);--ifm-footer-color:inherit;--ifm-footer-link-color:var(--ifm-color-emphasis-700);--ifm-footer-link-hover-color:var(--ifm-color-primary);--ifm-footer-link-horizontal-spacing:0.5rem;--ifm-footer-padding-horizontal:calc(var(--ifm-spacing-horizontal)*2);--ifm-footer-padding-vertical:calc(var(--ifm-spacing-vertical)*2);--ifm-footer-title-color:inherit;--ifm-footer-logo-max-width:min(30rem,90vw);--ifm-hero-background-color:var(--ifm-background-surface-color);--ifm-hero-text-color:var(--ifm-color-emphasis-800);--ifm-menu-color:var(--ifm-color-emphasis-700);--ifm-menu-color-active:var(--ifm-color-primary);--ifm-menu-color-background-active:var(--ifm-hover-overlay);--ifm-menu-color-background-hover:var(--ifm-hover-overlay);--if
m-menu-link-padding-horizontal:0.75rem;--ifm-menu-link-padding-vertical:0.375rem;--ifm-menu-link-sublist-icon:url('data:image/svg+xml;utf8,');--ifm-menu-link-sublist-icon-filter:none;--ifm-navbar-background-color:var(--ifm-background-surface-color);--ifm-navbar-height:3.75rem;--ifm-navbar-item-padding-horizontal:0.75rem;--ifm-navbar-item-padding-vertical:0.25rem;--ifm-navbar-link-color:var(--ifm-font-color-base);--ifm-navbar-link-active-color:var(--ifm-link-color);--ifm-navbar-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-navbar-padding-vertical:calc(var(--ifm-spacing-vertical)*0.5);--ifm-navbar-shadow:var(--ifm-global-shadow-lw);--ifm-navbar-search-input-background-color:var(--ifm-color-emphasis-200);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-800);--ifm-navbar-search-input-placeholder-color:var(--ifm-color-emphasis-500);--ifm-navbar-search-input-icon:url('data:image/svg+xml;utf8,');--ifm-navbar-sidebar-width:83vw;--ifm-pagination-border-radius:var(--ifm-global-radius);--ifm-pagination-color-active:var(--ifm-color-primary);--ifm-pagination-font-size:1rem;--ifm-pagination-item-active-background:var(--ifm-hover-overlay);--ifm-pagination-page-spacing:0.2em;--ifm-pagination-padding-horizontal:calc(var(--ifm-spacing-horizontal)*1);--ifm-pagination-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-pagination-nav-border-radius:var(--ifm-global-radius);--ifm-pagination-nav-color-hover:var(--ifm-color-primary);--ifm-pills-color-active:var(--ifm-color-primary);--ifm-pills-color-background-active:var(--ifm-hover-overlay);--ifm-pills-spacing:0.125rem;--ifm-tabs-color:var(--ifm-font-color-secondary);--ifm-tabs-color-active:var(--ifm-color-primary);--ifm-tabs-color-active-border:var(--ifm-tabs-color-active);--ifm-tabs-padding-horizontal:1rem;--ifm-tabs-padding-vertical:1rem;--docusaurus-progress-bar-color:var(--ifm-color-primary);--docusaurus-announcement-bar-height:auto;--docusaurus-collapse-button-bg:#0000;--docusaurus-collapse-button-bg-hover:#0000001a;--doc-sidebar-width:300px;--doc-sidebar-hidden-width:30px;--docusaurus-tag-list-border:var(--ifm-color-emphasis-300)}.badge--danger,.badge--info,.badge--primary,.badge--secondary,.badge--success,.badge--warning{--ifm-badge-border-color:var(--ifm-badge-background-color)}.button--link,.button--outline{--ifm-button-background-color:#0000}html{-webkit-font-smoothing:antialiased;-webkit-text-size-adjust:100%;text-size-adjust:100%;background-color:var(--ifm-background-color);color:var(--ifm-font-color-base);color-scheme:var(--ifm-color-scheme);font:var(--ifm-font-size-base)/var(--ifm-line-height-base) var(--ifm-font-family-base);text-rendering:optimizelegibility}iframe{border:0;color-scheme:auto}.container{margin:0 auto;max-width:var(--ifm-container-width)}.container--fluid{max-width:inherit}.row{display:flex;flex-wrap:wrap;margin:0 calc(var(--ifm-spacing-horizontal)*-1)}.margin-bottom--none,.margin-vert--none,.markdown>:last-child{margin-bottom:0!important}.margin-top--none,.margin-vert--none{margin-top:0!important}.row--no-gutters{margin-left:0;margin-right:0}.margin-horiz--none,.margin-right--none{margin-right:0!important}.row--no-gutters>.col{padding-left:0;padding-right:0}.row--align-top{align-items:flex-start}.row--align-bottom{align-items:flex-end}.menuExternalLink_NmtK,.row--align-center{align-items:center}.row--align-stretch{align-items:stretch}.row--align-baseline{align-items:baseline}.col{--ifm-col-width:100%;flex:1 
0;margin-left:0;max-width:var(--ifm-col-width)}.padding-bottom--none,.padding-vert--none{padding-bottom:0!important}.padding-top--none,.padding-vert--none{padding-top:0!important}.padding-horiz--none,.padding-left--none{padding-left:0!important}.padding-horiz--none,.padding-right--none{padding-right:0!important}.col[class*=col--]{flex:0 0 var(--ifm-col-width)}.col--1{--ifm-col-width:8.33333%}.col--offset-1{margin-left:8.33333%}.col--2{--ifm-col-width:16.66667%}.col--offset-2{margin-left:16.66667%}.col--3{--ifm-col-width:25%}.col--offset-3{margin-left:25%}.col--4{--ifm-col-width:33.33333%}.col--offset-4{margin-left:33.33333%}.col--5{--ifm-col-width:41.66667%}.col--offset-5{margin-left:41.66667%}.col--6{--ifm-col-width:50%}.col--offset-6{margin-left:50%}.col--7{--ifm-col-width:58.33333%}.col--offset-7{margin-left:58.33333%}.col--8{--ifm-col-width:66.66667%}.col--offset-8{margin-left:66.66667%}.col--9{--ifm-col-width:75%}.col--offset-9{margin-left:75%}.col--10{--ifm-col-width:83.33333%}.col--offset-10{margin-left:83.33333%}.col--11{--ifm-col-width:91.66667%}.col--offset-11{margin-left:91.66667%}.col--12{--ifm-col-width:100%}.col--offset-12{margin-left:100%}.margin-horiz--none,.margin-left--none{margin-left:0!important}.margin--none{margin:0!important}.margin-bottom--xs,.margin-vert--xs{margin-bottom:.25rem!important}.margin-top--xs,.margin-vert--xs{margin-top:.25rem!important}.margin-horiz--xs,.margin-left--xs{margin-left:.25rem!important}.margin-horiz--xs,.margin-right--xs{margin-right:.25rem!important}.margin--xs{margin:.25rem!important}.margin-bottom--sm,.margin-vert--sm{margin-bottom:.5rem!important}.margin-top--sm,.margin-vert--sm{margin-top:.5rem!important}.margin-horiz--sm,.margin-left--sm{margin-left:.5rem!important}.margin-horiz--sm,.margin-right--sm{margin-right:.5rem!important}.margin--sm{margin:.5rem!important}.margin-bottom--md,.margin-vert--md{margin-bottom:1rem!important}.margin-top--md,.margin-vert--md{margin-top:1rem!important}.margin-horiz--md,.margin-left--md{margin-left:1rem!important}.margin-horiz--md,.margin-right--md{margin-right:1rem!important}.margin--md{margin:1rem!important}.margin-bottom--lg,.margin-vert--lg{margin-bottom:2rem!important}.margin-top--lg,.margin-vert--lg{margin-top:2rem!important}.margin-horiz--lg,.margin-left--lg{margin-left:2rem!important}.margin-horiz--lg,.margin-right--lg{margin-right:2rem!important}.margin--lg{margin:2rem!important}.margin-bottom--xl,.margin-vert--xl{margin-bottom:5rem!important}.margin-top--xl,.margin-vert--xl{margin-top:5rem!important}.margin-horiz--xl,.margin-left--xl{margin-left:5rem!important}.margin-horiz--xl,.margin-right--xl{margin-right:5rem!important}.margin--xl{margin:5rem!important}.padding--none{padding:0!important}.padding-bottom--xs,.padding-vert--xs{padding-bottom:.25rem!important}.padding-top--xs,.padding-vert--xs{padding-top:.25rem!important}.padding-horiz--xs,.padding-left--xs{padding-left:.25rem!important}.padding-horiz--xs,.padding-right--xs{padding-right:.25rem!important}.padding--xs{padding:.25rem!important}.padding-bottom--sm,.padding-vert--sm{padding-bottom:.5rem!important}.padding-top--sm,.padding-vert--sm{padding-top:.5rem!important}.padding-horiz--sm,.padding-left--sm{padding-left:.5rem!important}.padding-horiz--sm,.padding-right--sm{padding-right:.5rem!important}.padding--sm{padding:.5rem!important}.padding-bottom--md,.padding-vert--md{padding-bottom:1rem!important}.padding-top--md,.padding-vert--md{padding-top:1rem!important}.padding-horiz--md,.padding-left--md{padding-left:1rem!important}.padding-ho
riz--md,.padding-right--md{padding-right:1rem!important}.padding--md{padding:1rem!important}.padding-bottom--lg,.padding-vert--lg{padding-bottom:2rem!important}.padding-top--lg,.padding-vert--lg{padding-top:2rem!important}.padding-horiz--lg,.padding-left--lg{padding-left:2rem!important}.padding-horiz--lg,.padding-right--lg{padding-right:2rem!important}.padding--lg{padding:2rem!important}.padding-bottom--xl,.padding-vert--xl{padding-bottom:5rem!important}.padding-top--xl,.padding-vert--xl{padding-top:5rem!important}.padding-horiz--xl,.padding-left--xl{padding-left:5rem!important}.padding-horiz--xl,.padding-right--xl{padding-right:5rem!important}.padding--xl{padding:5rem!important}code{background-color:var(--ifm-code-background);border:.1rem solid #0000001a;border-radius:var(--ifm-code-border-radius);font-family:var(--ifm-font-family-monospace);font-size:var(--ifm-code-font-size);padding:var(--ifm-code-padding-vertical) var(--ifm-code-padding-horizontal)}a code{color:inherit}pre{background-color:var(--ifm-pre-background);border-radius:var(--ifm-pre-border-radius);color:var(--ifm-pre-color);font:var(--ifm-code-font-size)/var(--ifm-pre-line-height) var(--ifm-font-family-monospace);padding:var(--ifm-pre-padding)}pre code{background-color:initial;border:none;font-size:100%;line-height:inherit;padding:0}kbd{background-color:var(--ifm-color-emphasis-0);border:1px solid var(--ifm-color-emphasis-400);border-radius:.2rem;box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-400);color:var(--ifm-color-emphasis-800);font:80% var(--ifm-font-family-monospace);padding:.15rem .3rem}h1,h2,h3,h4,h5,h6{color:var(--ifm-heading-color);font-family:var(--ifm-heading-font-family);font-weight:var(--ifm-heading-font-weight);line-height:var(--ifm-heading-line-height);margin:var(--ifm-heading-margin-top) 0 var(--ifm-heading-margin-bottom) 0}h1{font-size:var(--ifm-h1-font-size)}h2{font-size:var(--ifm-h2-font-size)}h3{font-size:var(--ifm-h3-font-size)}h4{font-size:var(--ifm-h4-font-size)}h5{font-size:var(--ifm-h5-font-size)}h6{font-size:var(--ifm-h6-font-size)}img{max-width:100%}img[align=right]{padding-left:var(--image-alignment-padding)}img[align=left]{padding-right:var(--image-alignment-padding)}.markdown{--ifm-h1-vertical-rhythm-top:3;--ifm-h2-vertical-rhythm-top:2;--ifm-h3-vertical-rhythm-top:1.5;--ifm-heading-vertical-rhythm-top:1.25;--ifm-h1-vertical-rhythm-bottom:1.25;--ifm-heading-vertical-rhythm-bottom:1}.markdown:after,.markdown:before{content:"";display:table}.markdown:after{clear:both}.markdown h1:first-child{--ifm-h1-font-size:3rem;margin-bottom:calc(var(--ifm-h1-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown>h2{--ifm-h2-font-size:2rem;margin-top:calc(var(--ifm-h2-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h3{--ifm-h3-font-size:1.5rem;margin-top:calc(var(--ifm-h3-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h4,.markdown>h5,.markdown>h6{margin-top:calc(var(--ifm-heading-vertical-rhythm-top)*var(--ifm-leading))}.markdown>p,.markdown>pre,.markdown>ul{margin-bottom:var(--ifm-leading)}.markdown li>p{margin-top:var(--ifm-list-paragraph-margin)}.markdown li+li{margin-top:var(--ifm-list-item-margin)}ol,ul{margin:0 0 var(--ifm-list-margin);padding-left:var(--ifm-list-left-padding)}ol ol,ul ol{list-style-type:lower-roman}ol ol ol,ol ul ol,ul ol ol,ul ul ol{list-style-type:lower-alpha}table{border-collapse:collapse;display:block;margin-bottom:var(--ifm-spacing-vertical)}table thead tr{border-bottom:2px solid var(--ifm-table-border-color)}table thead,table 
tr:nth-child(2n){background-color:var(--ifm-table-stripe-background)}table tr{background-color:var(--ifm-table-background);border-top:var(--ifm-table-border-width) solid var(--ifm-table-border-color)}table td,table th{border:var(--ifm-table-border-width) solid var(--ifm-table-border-color);padding:var(--ifm-table-cell-padding)}table th{background-color:var(--ifm-table-head-background);color:var(--ifm-table-head-color);font-weight:var(--ifm-table-head-font-weight)}table td{color:var(--ifm-table-cell-color)}strong{font-weight:var(--ifm-font-weight-bold)}a{color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}a:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button:hover,.text--no-decoration,.text--no-decoration:hover,a:not([href]){text-decoration:none}p{margin:0 0 var(--ifm-paragraph-margin-bottom)}blockquote{border-left:var(--ifm-blockquote-border-left-width) solid var(--ifm-blockquote-border-color);box-shadow:var(--ifm-blockquote-shadow);color:var(--ifm-blockquote-color);font-size:var(--ifm-blockquote-font-size);padding:var(--ifm-blockquote-padding-vertical) var(--ifm-blockquote-padding-horizontal)}blockquote>:first-child{margin-top:0}blockquote>:last-child{margin-bottom:0}hr{background-color:var(--ifm-hr-background-color);border:0;height:var(--ifm-hr-height);margin:var(--ifm-hr-margin-vertical) 0}.shadow--lw{box-shadow:var(--ifm-global-shadow-lw)!important}.shadow--md{box-shadow:var(--ifm-global-shadow-md)!important}.shadow--tl{box-shadow:var(--ifm-global-shadow-tl)!important}.text--primary,.wordWrapButtonEnabled_EoeP .wordWrapButtonIcon_Bwma{color:var(--ifm-color-primary)}.text--secondary{color:var(--ifm-color-secondary)}.text--success{color:var(--ifm-color-success)}.text--info{color:var(--ifm-color-info)}.text--warning{color:var(--ifm-color-warning)}.text--danger{color:var(--ifm-color-danger)}.text--center{text-align:center}.text--left{text-align:left}.text--justify{text-align:justify}.text--right{text-align:right}.text--capitalize{text-transform:capitalize}.text--lowercase{text-transform:lowercase}.admonitionHeading_tbUL,.alert__heading,.text--uppercase{text-transform:uppercase}.text--light{font-weight:var(--ifm-font-weight-light)}.text--normal{font-weight:var(--ifm-font-weight-normal)}.text--semibold{font-weight:var(--ifm-font-weight-semibold)}.text--bold{font-weight:var(--ifm-font-weight-bold)}.text--italic{font-style:italic}.text--truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text--break{word-wrap:break-word!important;word-break:break-word!important}.clean-btn{background:none;border:none;color:inherit;cursor:pointer;font-family:inherit;padding:0}.alert,.alert 
.close{color:var(--ifm-alert-foreground-color)}.clean-list{padding-left:0}.alert--primary{--ifm-alert-background-color:var(--ifm-color-primary-contrast-background);--ifm-alert-background-color-highlight:#3578e526;--ifm-alert-foreground-color:var(--ifm-color-primary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-primary-dark)}.alert--secondary{--ifm-alert-background-color:var(--ifm-color-secondary-contrast-background);--ifm-alert-background-color-highlight:#ebedf026;--ifm-alert-foreground-color:var(--ifm-color-secondary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-secondary-dark)}.alert--success{--ifm-alert-background-color:var(--ifm-color-success-contrast-background);--ifm-alert-background-color-highlight:#00a40026;--ifm-alert-foreground-color:var(--ifm-color-success-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-success-dark)}.alert--info{--ifm-alert-background-color:var(--ifm-color-info-contrast-background);--ifm-alert-background-color-highlight:#54c7ec26;--ifm-alert-foreground-color:var(--ifm-color-info-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-info-dark)}.alert--warning{--ifm-alert-background-color:var(--ifm-color-warning-contrast-background);--ifm-alert-background-color-highlight:#ffba0026;--ifm-alert-foreground-color:var(--ifm-color-warning-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-warning-dark)}.alert--danger{--ifm-alert-background-color:var(--ifm-color-danger-contrast-background);--ifm-alert-background-color-highlight:#fa383e26;--ifm-alert-foreground-color:var(--ifm-color-danger-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-danger-dark)}.alert{--ifm-code-background:var(--ifm-alert-background-color-highlight);--ifm-link-color:var(--ifm-alert-foreground-color);--ifm-link-hover-color:var(--ifm-alert-foreground-color);--ifm-link-decoration:underline;--ifm-tabs-color:var(--ifm-alert-foreground-color);--ifm-tabs-color-active:var(--ifm-alert-foreground-color);--ifm-tabs-color-active-border:var(--ifm-alert-border-color);background-color:var(--ifm-alert-background-color);border:var(--ifm-alert-border-width) solid var(--ifm-alert-border-color);border-left-width:var(--ifm-alert-border-left-width);border-radius:var(--ifm-alert-border-radius);box-shadow:var(--ifm-alert-shadow);padding:var(--ifm-alert-padding-vertical) var(--ifm-alert-padding-horizontal)}.alert__heading{align-items:center;display:flex;font:700 var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.5rem}.alert__icon{display:inline-flex;margin-right:.4em}.alert__icon svg{fill:var(--ifm-alert-foreground-color);stroke:var(--ifm-alert-foreground-color);stroke-width:0}.alert .close{margin:calc(var(--ifm-alert-padding-vertical)*-1) calc(var(--ifm-alert-padding-horizontal)*-1) 0 0;opacity:.75}.alert .close:focus,.alert .close:hover{opacity:1}.alert a{text-decoration-color:var(--ifm-alert-border-color)}.alert a:hover{text-decoration-thickness:2px}.avatar{column-gap:var(--ifm-avatar-intro-margin);display:flex}.avatar__photo{border-radius:50%;display:block;height:var(--ifm-avatar-photo-size);overflow:hidden;width:var(--ifm-avatar-photo-size)}.card--full-height,.navbar__logo img,body,html{height:100%}.avatar__photo--sm{--ifm-avatar-photo-size:2rem}.avatar__photo--lg{--ifm-avatar-photo-size:4rem}.avatar__photo--xl{--ifm-avatar-photo-size:6rem}.avatar__intro{display:flex;flex:1 
1;flex-direction:column;justify-content:center;text-align:var(--ifm-avatar-intro-alignment)}.badge,.breadcrumbs__item,.breadcrumbs__link,.button,.dropdown>.navbar__link:after,.searchBarContainer_NW3z.searchIndexLoading_EJ1f .searchBarLoadingRing_YnHq{display:inline-block}.avatar__name{font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base)}.avatar__subtitle{margin-top:.25rem}.avatar--vertical{--ifm-avatar-intro-alignment:center;--ifm-avatar-intro-margin:0.5rem;align-items:center;flex-direction:column}.badge{background-color:var(--ifm-badge-background-color);border:var(--ifm-badge-border-width) solid var(--ifm-badge-border-color);border-radius:var(--ifm-badge-border-radius);color:var(--ifm-badge-color);font-size:75%;font-weight:var(--ifm-font-weight-bold);line-height:1;padding:var(--ifm-badge-padding-vertical) var(--ifm-badge-padding-horizontal)}.badge--primary{--ifm-badge-background-color:var(--ifm-color-primary)}.badge--secondary{--ifm-badge-background-color:var(--ifm-color-secondary);color:var(--ifm-color-black)}.breadcrumbs__link,.button.button--secondary.button--outline:not(.button--active):not(:hover){color:var(--ifm-font-color-base)}.badge--success{--ifm-badge-background-color:var(--ifm-color-success)}.badge--info{--ifm-badge-background-color:var(--ifm-color-info)}.badge--warning{--ifm-badge-background-color:var(--ifm-color-warning)}.badge--danger{--ifm-badge-background-color:var(--ifm-color-danger)}.breadcrumbs{margin-bottom:0;padding-left:0}.breadcrumbs__item:not(:last-child):after{background:var(--ifm-breadcrumb-separator) center;content:" ";display:inline-block;filter:var(--ifm-breadcrumb-separator-filter);height:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier));margin:0 var(--ifm-breadcrumb-spacing);opacity:.5;width:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier))}.breadcrumbs__item--active .breadcrumbs__link{background:var(--ifm-breadcrumb-item-background-active);color:var(--ifm-breadcrumb-color-active)}.breadcrumbs__link{border-radius:var(--ifm-breadcrumb-border-radius);font-size:calc(1rem*var(--ifm-breadcrumb-size-multiplier));padding:calc(var(--ifm-breadcrumb-padding-vertical)*var(--ifm-breadcrumb-size-multiplier)) calc(var(--ifm-breadcrumb-padding-horizontal)*var(--ifm-breadcrumb-size-multiplier));transition-duration:var(--ifm-transition-fast);transition-property:background,color}.breadcrumbs__link:any-link:hover,.breadcrumbs__link:link:hover,.breadcrumbs__link:visited:hover,area[href].breadcrumbs__link:hover{background:var(--ifm-breadcrumb-item-background-active);text-decoration:none}.breadcrumbs--sm{--ifm-breadcrumb-size-multiplier:0.8}.breadcrumbs--lg{--ifm-breadcrumb-size-multiplier:1.2}.button{background-color:var(--ifm-button-background-color);border:var(--ifm-button-border-width) solid var(--ifm-button-border-color);border-radius:var(--ifm-button-border-radius);cursor:pointer;font-size:calc(.875rem*var(--ifm-button-size-multiplier));font-weight:var(--ifm-button-font-weight);line-height:1.5;padding:calc(var(--ifm-button-padding-vertical)*var(--ifm-button-size-multiplier)) 
calc(var(--ifm-button-padding-horizontal)*var(--ifm-button-size-multiplier));text-align:center;transition-duration:var(--ifm-button-transition-duration);transition-property:color,background,border-color;-webkit-user-select:none;user-select:none;white-space:nowrap}.button,.button:hover{color:var(--ifm-button-color)}.button--outline{--ifm-button-color:var(--ifm-button-border-color)}.button--outline:hover{--ifm-button-background-color:var(--ifm-button-border-color)}.button--link{--ifm-button-border-color:#0000;color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}.button--link.button--active,.button--link:active,.button--link:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button.disabled,.button:disabled,.button[disabled]{opacity:.65;pointer-events:none}.button--sm{--ifm-button-size-multiplier:0.8}.button--lg{--ifm-button-size-multiplier:1.35}.button--block{display:block;width:100%}.button.button--secondary{color:var(--ifm-color-gray-900)}:where(.button--primary){--ifm-button-background-color:var(--ifm-color-primary);--ifm-button-border-color:var(--ifm-color-primary)}:where(.button--primary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-primary-dark);--ifm-button-border-color:var(--ifm-color-primary-dark)}.button--primary.button--active,.button--primary:active{--ifm-button-background-color:var(--ifm-color-primary-darker);--ifm-button-border-color:var(--ifm-color-primary-darker)}:where(.button--secondary){--ifm-button-background-color:var(--ifm-color-secondary);--ifm-button-border-color:var(--ifm-color-secondary)}:where(.button--secondary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-secondary-dark);--ifm-button-border-color:var(--ifm-color-secondary-dark)}.button--secondary.button--active,.button--secondary:active{--ifm-button-background-color:var(--ifm-color-secondary-darker);--ifm-button-border-color:var(--ifm-color-secondary-darker)}:where(.button--success){--ifm-button-background-color:var(--ifm-color-success);--ifm-button-border-color:var(--ifm-color-success)}:where(.button--success):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-success-dark);--ifm-button-border-color:var(--ifm-color-success-dark)}.button--success.button--active,.button--success:active{--ifm-button-background-color:var(--ifm-color-success-darker);--ifm-button-border-color:var(--ifm-color-success-darker)}:where(.button--info){--ifm-button-background-color:var(--ifm-color-info);--ifm-button-border-color:var(--ifm-color-info)}:where(.button--info):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-info-dark);--ifm-button-border-color:var(--ifm-color-info-dark)}.button--info.button--active,.button--info:active{--ifm-button-background-color:var(--ifm-color-info-darker);--ifm-button-border-color:var(--ifm-color-info-darker)}:where(.button--warning){--ifm-button-background-color:var(--ifm-color-warning);--ifm-button-border-color:var(--ifm-color-warning)}:where(.button--warning):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-warning-dark);--ifm-button-border-color:var(--ifm-color-warning-dark)}.button--warning.button--active,.button--warning:active{--ifm-button-background-color:var(--ifm-color-warning-darker);--ifm-button-border-color:var(--ifm-color-warning-darker)}:where(.button--danger){--ifm-button-background-color:var(--ifm-color-danger);--ifm-button-border-color:var(--ifm-color-danger)}:where(.button--danger):not(.button--outline):h
over{--ifm-button-background-color:var(--ifm-color-danger-dark);--ifm-button-border-color:var(--ifm-color-danger-dark)}.button--danger.button--active,.button--danger:active{--ifm-button-background-color:var(--ifm-color-danger-darker);--ifm-button-border-color:var(--ifm-color-danger-darker)}.button-group{display:inline-flex;gap:var(--ifm-button-group-spacing)}.button-group>.button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.button-group>.button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.button-group--block{display:flex;justify-content:stretch}.button-group--block>.button{flex-grow:1}.card{background-color:var(--ifm-card-background-color);border-radius:var(--ifm-card-border-radius);box-shadow:var(--ifm-global-shadow-lw);display:flex;flex-direction:column;overflow:hidden}.card__image{padding-top:var(--ifm-card-vertical-spacing)}.card__image:first-child{padding-top:0}.card__body,.card__footer,.card__header{padding:var(--ifm-card-vertical-spacing) var(--ifm-card-horizontal-spacing)}.card__body:not(:last-child),.card__footer:not(:last-child),.card__header:not(:last-child){padding-bottom:0}.card__body>:last-child,.card__footer>:last-child,.card__header>:last-child{margin-bottom:0}.card__footer{margin-top:auto}.table-of-contents{font-size:.8rem;margin-bottom:0;padding:var(--ifm-toc-padding-vertical) 0}.table-of-contents,.table-of-contents ul{list-style:none;padding-left:var(--ifm-toc-padding-horizontal)}.table-of-contents li{margin:var(--ifm-toc-padding-vertical) var(--ifm-toc-padding-horizontal)}.table-of-contents__left-border{border-left:1px solid var(--ifm-toc-border-color)}.table-of-contents__link{color:var(--ifm-toc-link-color);display:block}.table-of-contents__link--active,.table-of-contents__link--active code,.table-of-contents__link:hover,.table-of-contents__link:hover code{color:var(--ifm-color-primary);text-decoration:none}.content_knG7 a,.hitFooter_E9YW a,.suggestion_fB_2.cursor_eG29 mark{text-decoration:underline}.close{color:var(--ifm-color-black);float:right;font-size:1.5rem;font-weight:var(--ifm-font-weight-bold);line-height:1;opacity:.5;padding:1rem;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.close:hover{opacity:.7}.close:focus,.theme-code-block-highlighted-line .codeLineNumber_Tfdd:before{opacity:.8}.dropdown{display:inline-flex;font-weight:var(--ifm-dropdown-font-weight);position:relative;vertical-align:top}.dropdown--hoverable:hover .dropdown__menu,.dropdown--show .dropdown__menu{opacity:1;pointer-events:all;transform:translateY(-1px);visibility:visible}.dropdown--right .dropdown__menu{left:inherit;right:0}.dropdown--nocaret .navbar__link:after{content:none!important}.dropdown__menu{background-color:var(--ifm-dropdown-background-color);border-radius:var(--ifm-global-radius);box-shadow:var(--ifm-global-shadow-md);left:0;max-height:80vh;min-width:10rem;opacity:0;overflow-y:auto;padding:.5rem;pointer-events:none;position:absolute;top:calc(100% - var(--ifm-navbar-item-padding-vertical) + .3rem);transform:translateY(-.625rem);transition-duration:var(--ifm-transition-fast);transition-property:opacity,transform,visibility;transition-timing-function:var(--ifm-transition-timing-default);visibility:hidden;z-index:var(--ifm-z-index-dropdown)}.sidebar_re4s,.tableOfContents_bqdL{max-height:calc(100vh - var(--ifm-navbar-height) - 2rem)}.menu__caret,.menu__link,.menu__list-item-collapsible{border-radius:.25rem;transition:background var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.dropdown__link{border-radius:.25rem;color:var(--ifm-dropdown-link-color);display:block;font-size:.875rem;margin-top:.2rem;padding:.25rem .5rem;white-space:nowrap}.dropdown__link--active,.dropdown__link:hover{background-color:var(--ifm-dropdown-hover-background-color);color:var(--ifm-dropdown-link-color);text-decoration:none}.dropdown__link--active,.dropdown__link--active:hover{--ifm-dropdown-link-color:var(--ifm-link-color)}.dropdown>.navbar__link:after{border-color:currentcolor #0000;border-style:solid;border-width:.4em .4em 0;content:"";margin-left:.3em;position:relative;top:2px;transform:translateY(-50%)}.footer{background-color:var(--ifm-footer-background-color);color:var(--ifm-footer-color);padding:var(--ifm-footer-padding-vertical) var(--ifm-footer-padding-horizontal)}.footer--dark{--ifm-footer-background-color:#303846;--ifm-footer-color:var(--ifm-footer-link-color);--ifm-footer-link-color:var(--ifm-color-secondary);--ifm-footer-title-color:var(--ifm-color-white)}.footer__links{margin-bottom:1rem}.footer__link-item{color:var(--ifm-footer-link-color);line-height:2}.footer__link-item:hover{color:var(--ifm-footer-link-hover-color)}.footer__link-separator{margin:0 var(--ifm-footer-link-horizontal-spacing)}.footer__logo{margin-top:1rem;max-width:var(--ifm-footer-logo-max-width)}.footer__title{color:var(--ifm-footer-title-color);font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base);margin-bottom:var(--ifm-heading-margin-bottom)}.menu,.navbar__link{font-weight:var(--ifm-font-weight-semibold)}.docItemContainer_Djhp article>:first-child,.docItemContainer_Djhp header+*,.footer__item{margin-top:0}.admonitionContent_S0QG>:last-child,.collapsibleContent_i85q>:last-child,.footer__items,.searchResultItem_U687>h2{margin-bottom:0}.codeBlockStandalone_MEMb,[type=checkbox]{padding:0}.hero{align-items:center;background-color:var(--ifm-hero-background-color);color:var(--ifm-hero-text-color);display:flex;padding:4rem 2rem}.hero--primary{--ifm-hero-background-color:var(--ifm-color-primary);--ifm-hero-text-color:var(--ifm-font-color-base-inverse)}.hero--dark{--ifm-hero-background-color:#303846;--ifm-hero-text-color:var(--ifm-color-white)}.hero__title,.title_f1Hy{font-size:3rem}.hero__subtitle{font-size:1.5rem}.menu__list{margin:0;padding-left:0}.menu__caret,.menu__link{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu__list .menu__list{flex:0 0 100%;margin-top:.25rem;padding-left:var(--ifm-menu-link-padding-horizontal)}.menu__list-item:not(:first-child){margin-top:.25rem}.menu__list-item--collapsed .menu__list{height:0;overflow:hidden}.details_lb9f[data-collapsed=false].isBrowser_bmU9>summary:before,.details_lb9f[open]:not(.isBrowser_bmU9)>summary:before,.menu__list-item--collapsed .menu__caret:before,.menu__list-item--collapsed .menu__link--sublist:after{transform:rotate(90deg)}.menu__list-item-collapsible{display:flex;flex-wrap:wrap;position:relative}.menu__caret:hover,.menu__link:hover,.menu__list-item-collapsible--active,.menu__list-item-collapsible:hover{background:var(--ifm-menu-color-background-hover)}.menu__list-item-collapsible .menu__link--active,.menu__list-item-collapsible 
.menu__link:hover{background:none!important}.menu__caret,.menu__link{align-items:center;display:flex}.menu__link{color:var(--ifm-menu-color);flex:1;line-height:1.25}.menu__link:hover{color:var(--ifm-menu-color);text-decoration:none}.menu__caret:before,.menu__link--sublist-caret:after{content:"";height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast) linear;width:1.25rem;filter:var(--ifm-menu-link-sublist-icon-filter)}.menu__link--sublist-caret:after{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem;margin-left:auto;min-width:1.25rem}.menu__link--active,.menu__link--active:hover{color:var(--ifm-menu-color-active)}.navbar__brand,.navbar__link{color:var(--ifm-navbar-link-color)}.menu__link--active:not(.menu__link--sublist){background-color:var(--ifm-menu-color-background-active)}.menu__caret:before{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem}.navbar--dark,html[data-theme=dark]{--ifm-menu-link-sublist-icon-filter:invert(100%) sepia(94%) saturate(17%) hue-rotate(223deg) brightness(104%) contrast(98%)}.navbar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-navbar-shadow);height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) var(--ifm-navbar-padding-horizontal)}.navbar,.navbar>.container,.navbar>.container-fluid{display:flex}.navbar--fixed-top{position:sticky;top:0;z-index:var(--ifm-z-index-fixed)}.navbar-sidebar,.navbar-sidebar__backdrop{bottom:0;opacity:0;position:fixed;transition-duration:var(--ifm-transition-fast);transition-timing-function:ease-in-out;left:0;top:0;visibility:hidden}.navbar__inner{display:flex;flex-wrap:wrap;justify-content:space-between;width:100%}.navbar__brand{align-items:center;display:flex;margin-right:1rem;min-width:0}.navbar__brand:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.announcementBarContent_xLdY,.navbar__title{flex:1 1 auto}.navbar__toggle{display:none;margin-right:.5rem}.navbar__logo{flex:0 0 auto;height:2rem;margin-right:.5rem}.navbar__items{align-items:center;display:flex;flex:1;min-width:0}.navbar__items--center{flex:0 0 auto}.navbar__items--center .navbar__brand{margin:0}.navbar__items--center+.navbar__items--right{flex:1}.navbar__items--right{flex:0 0 auto;justify-content:flex-end}.navbar__items--right>:last-child{padding-right:0}.navbar__item{display:inline-block;padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}#nprogress,.navbar__item.dropdown .navbar__link:not([href]){pointer-events:none}.navbar__link--active,.navbar__link:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.navbar--dark,.navbar--primary{--ifm-menu-color:var(--ifm-color-gray-300);--ifm-navbar-link-color:var(--ifm-color-gray-100);--ifm-navbar-search-input-background-color:#ffffff1a;--ifm-navbar-search-input-placeholder-color:#ffffff80;color:var(--ifm-color-white)}.navbar--dark{--ifm-navbar-background-color:#242526;--ifm-menu-color-background-active:#ffffff0d;--ifm-navbar-search-input-color:var(--ifm-color-white)}.navbar--primary{--ifm-navbar-background-color:var(--ifm-color-primary);--ifm-navbar-link-hover-color:var(--ifm-color-white);--ifm-menu-color-active:var(--ifm-color-white);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-500)}.navbar__search-input{-webkit-appearance:none;appearance:none;background:var(--ifm-navbar-search-input-background-color) var(--ifm-navbar-search-input-icon) no-repeat .75rem center/1rem 
1rem;border:none;border-radius:2rem;color:var(--ifm-navbar-search-input-color);cursor:text;display:inline-block;font-size:.9rem;height:2rem;padding:0 .5rem 0 2.25rem;width:12.5rem}.navbar__search-input::placeholder{color:var(--ifm-navbar-search-input-placeholder-color)}.navbar-sidebar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-global-shadow-md);transform:translate3d(-100%,0,0);transition-property:opacity,visibility,transform;width:var(--ifm-navbar-sidebar-width)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar__items{transform:translateZ(0)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar--show .navbar-sidebar__backdrop{opacity:1;visibility:visible}.navbar-sidebar__backdrop{background-color:#0009;right:0;transition-property:opacity,visibility}.navbar-sidebar__brand{align-items:center;box-shadow:var(--ifm-navbar-shadow);display:flex;flex:1;height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) var(--ifm-navbar-padding-horizontal)}.navbar-sidebar__items{display:flex;height:calc(100% - var(--ifm-navbar-height));transition:transform var(--ifm-transition-fast) ease-in-out}.navbar-sidebar__items--show-secondary{transform:translate3d(calc((var(--ifm-navbar-sidebar-width))*-1),0,0)}.navbar-sidebar__item{flex-shrink:0;padding:.5rem;width:calc(var(--ifm-navbar-sidebar-width))}.navbar-sidebar__back{background:var(--ifm-menu-color-background-active);font-size:15px;font-weight:var(--ifm-button-font-weight);margin:0 0 .2rem -.5rem;padding:.6rem 1.5rem;position:relative;text-align:left;top:-.5rem;width:calc(100% + 1rem)}.navbar-sidebar__close{display:flex;margin-left:auto}.pagination{column-gap:var(--ifm-pagination-page-spacing);display:flex;font-size:var(--ifm-pagination-font-size);padding-left:0}.pagination--sm{--ifm-pagination-font-size:0.8rem;--ifm-pagination-padding-horizontal:0.8rem;--ifm-pagination-padding-vertical:0.2rem}.pagination--lg{--ifm-pagination-font-size:1.2rem;--ifm-pagination-padding-horizontal:1.2rem;--ifm-pagination-padding-vertical:0.3rem}.pagination__item{display:inline-flex}.pagination__item>span{padding:var(--ifm-pagination-padding-vertical)}.pagination__item--active .pagination__link{color:var(--ifm-pagination-color-active)}.pagination__item--active .pagination__link,.pagination__item:not(.pagination__item--active):hover .pagination__link{background:var(--ifm-pagination-item-active-background)}.pagination__item--disabled,.pagination__item[disabled]{opacity:.25;pointer-events:none}.pagination__link{border-radius:var(--ifm-pagination-border-radius);color:var(--ifm-font-color-base);display:inline-block;padding:var(--ifm-pagination-padding-vertical) var(--ifm-pagination-padding-horizontal);transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.pagination__link:hover,.sidebarItemLink_mo7H:hover{text-decoration:none}.pagination-nav{grid-gap:var(--ifm-spacing-horizontal);display:grid;gap:var(--ifm-spacing-horizontal);grid-template-columns:repeat(2,1fr)}.pagination-nav__link{border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-pagination-nav-border-radius);display:block;height:100%;line-height:var(--ifm-heading-line-height);padding:var(--ifm-global-spacing);transition:border-color var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.pagination-nav__link:hover{border-color:var(--ifm-pagination-nav-color-hover);text-decoration:none}.pagination-nav__link--next{grid-column:2/3;text-align:right}.pagination-nav__label{font-size:var(--ifm-h4-font-size);font-weight:var(--ifm-heading-font-weight);word-break:break-word}.pagination-nav__link--prev .pagination-nav__label:before{content:"« "}.pagination-nav__link--next .pagination-nav__label:after{content:" »"}.pagination-nav__sublabel{color:var(--ifm-color-content-secondary);font-size:var(--ifm-h5-font-size);font-weight:var(--ifm-font-weight-semibold);margin-bottom:.25rem}.pills__item,.tabs{font-weight:var(--ifm-font-weight-bold)}.pills{display:flex;gap:var(--ifm-pills-spacing);padding-left:0}.pills__item{border-radius:.5rem;cursor:pointer;display:inline-block;padding:.25rem 1rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs,:not(.containsTaskList_mC6p>li)>.containsTaskList_mC6p{padding-left:0}.pills__item--active{color:var(--ifm-pills-color-active)}.pills__item--active,.pills__item:not(.pills__item--active):hover{background:var(--ifm-pills-color-background-active)}.pills--block{justify-content:stretch}.pills--block .pills__item{flex-grow:1;text-align:center}.tabs{color:var(--ifm-tabs-color);display:flex;margin-bottom:0;overflow-x:auto}.tabs__item{border-bottom:3px solid #0000;border-radius:var(--ifm-global-radius);cursor:pointer;display:inline-flex;padding:var(--ifm-tabs-padding-vertical) var(--ifm-tabs-padding-horizontal);transition:background-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs__item--active{border-bottom-color:var(--ifm-tabs-color-active-border);border-bottom-left-radius:0;border-bottom-right-radius:0;color:var(--ifm-tabs-color-active)}.tabs__item:hover{background-color:var(--ifm-hover-overlay)}.tabs--block{justify-content:stretch}.tabs--block .tabs__item{flex-grow:1;justify-content:center}html[data-theme=dark]{--ifm-color-scheme:dark;--ifm-color-emphasis-0:var(--ifm-color-gray-1000);--ifm-color-emphasis-100:var(--ifm-color-gray-900);--ifm-color-emphasis-200:var(--ifm-color-gray-800);--ifm-color-emphasis-300:var(--ifm-color-gray-700);--ifm-color-emphasis-400:var(--ifm-color-gray-600);--ifm-color-emphasis-600:var(--ifm-color-gray-400);--ifm-color-emphasis-700:var(--ifm-color-gray-300);--ifm-color-emphasis-800:var(--ifm-color-gray-200);--ifm-color-emphasis-900:var(--ifm-color-gray-100);--ifm-color-emphasis-1000:var(--ifm-color-gray-0);--ifm-background-color:#1b1b1d;--ifm-background-surface-color:#242526;--ifm-hover-overlay:#ffffff0d;--ifm-color-content:#e3e3e3;--ifm-color-content-secondary:#fff;--ifm-breadcrumb-separator-filter:invert(64%) sepia(11%) saturate(0%) hue-rotate(149deg) brightness(99%) 
contrast(95%);--ifm-code-background:#ffffff1a;--ifm-scrollbar-track-background-color:#444;--ifm-scrollbar-thumb-background-color:#686868;--ifm-scrollbar-thumb-hover-background-color:#7a7a7a;--ifm-table-stripe-background:#ffffff12;--ifm-toc-border-color:var(--ifm-color-emphasis-200);--ifm-color-primary-contrast-background:#102445;--ifm-color-primary-contrast-foreground:#ebf2fc;--ifm-color-secondary-contrast-background:#474748;--ifm-color-secondary-contrast-foreground:#fdfdfe;--ifm-color-success-contrast-background:#003100;--ifm-color-success-contrast-foreground:#e6f6e6;--ifm-color-info-contrast-background:#193c47;--ifm-color-info-contrast-foreground:#eef9fd;--ifm-color-warning-contrast-background:#4d3800;--ifm-color-warning-contrast-foreground:#fff8e6;--ifm-color-danger-contrast-background:#4b1113;--ifm-color-danger-contrast-foreground:#ffebec}#nprogress .bar{background:var(--docusaurus-progress-bar-color);height:2px;left:0;position:fixed;top:0;width:100%;z-index:1031}#nprogress .peg{box-shadow:0 0 10px var(--docusaurus-progress-bar-color),0 0 5px var(--docusaurus-progress-bar-color);height:100%;opacity:1;position:absolute;right:0;transform:rotate(3deg) translateY(-4px);width:100px}:root,[data-theme=dark]{--ifm-color-primary:#2196f3;--ifm-color-primary-dark:#1565c0;--ifm-color-primary-darker:#0d47a1;--ifm-color-primary-darkest:#002171;--ifm-color-primary-light:#64b5f6;--ifm-color-primary-lighter:#bbdefb;--ifm-color-primary-lightest:#e3f2fd;--ifm-code-font-size:95%;--docusaurus-highlighted-code-line-bg:#0000001a}div[class^=announcementBar_]{background:repeating-linear-gradient(-35deg,var(--ifm-color-primary-lighter),var(--ifm-color-primary-lighter) 20px,var(--ifm-color-primary-lightest) 10px,var(--ifm-color-primary-lightest) 40px);font-weight:700}h2,h3,h4,h5,h6{color:#40b8fa}@font-face{font-family:SourceHanSansCN;font-style:normal;font-weight:400;src:url(/assets/fonts/SourceHanSansCN-Regular-1235a610813e82ec7e42bbb8123b3d74.ttf) format("truetype")}body,h1,h2,h3,h4,h5,h6,p{font-family:Helvetica,SourceHanSansCN}body:not(.navigation-with-keyboard) :not(input):focus{outline:0}#__docusaurus-base-url-issue-banner-container,.docSidebarContainer_b6E3,.hideAction_vcyE>svg,.sidebarLogo_isFc,.themedImage_ToTc,[data-theme=dark] .lightToggleIcon_pyhR,[data-theme=light] .darkToggleIcon_wfgR,html[data-announcement-bar-initially-dismissed=true] .announcementBar_mb4j{display:none}.skipToContent_fXgn{background-color:var(--ifm-background-surface-color);color:var(--ifm-color-emphasis-900);left:100%;padding:calc(var(--ifm-global-spacing)/2) var(--ifm-global-spacing);position:fixed;top:1rem;z-index:calc(var(--ifm-z-index-fixed) + 1)}.skipToContent_fXgn:focus{box-shadow:var(--ifm-global-shadow-md);left:1rem}.closeButton_CVFx{line-height:0;padding:0}.content_knG7{font-size:85%;padding:5px 0;text-align:center}.content_knG7 a{color:inherit}.announcementBar_mb4j{align-items:center;background-color:var(--ifm-color-white);border-bottom:1px solid var(--ifm-color-emphasis-100);color:var(--ifm-color-black);display:flex;height:var(--docusaurus-announcement-bar-height)}.announcementBarPlaceholder_vyr4{flex:0 0 10px}.announcementBarClose_gvF7{align-self:stretch;flex:0 0 30px}.toggle_vylO{height:2rem;width:2rem}.toggleButton_gllP{align-items:center;border-radius:50%;display:flex;height:100%;justify-content:center;transition:background 
var(--ifm-transition-fast);width:100%}.toggleButton_gllP:hover{background:var(--ifm-color-emphasis-200)}.toggleButtonDisabled_aARS{cursor:not-allowed}.darkNavbarColorModeToggle_X3D1:hover{background:var(--ifm-color-gray-800)}[data-theme=dark] .themedImage--dark_i4oU,[data-theme=light] .themedImage--light_HNdA,html:not([data-theme]) .themedComponent--light_NU7w{display:initial}.iconExternalLink_nPIU{margin-left:.3rem}.iconLanguage_nlXk{margin-right:5px;vertical-align:text-bottom}.searchBar_RVTs .dropdownMenu_qbY6{background:var(--search-local-modal-background,#f5f6f7);border-radius:6px;box-shadow:var(--search-local-modal-shadow,inset 1px 1px 0 0 #ffffff80,0 3px 8px 0 #555a64);left:auto!important;margin-top:8px;padding:var(--search-local-spacing,12px);position:relative;right:0!important;width:var(--search-local-modal-width,560px)}html[data-theme=dark] .searchBar_RVTs .dropdownMenu_qbY6{background:var(--search-local-modal-background,var(--ifm-background-color));box-shadow:var(--search-local-modal-shadow,inset 1px 1px 0 0 #2c2e40,0 3px 8px 0 #000309)}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2{align-items:center;background:var(--search-local-hit-background,#fff);border-radius:4px;box-shadow:var(--search-local-hit-shadow,0 1px 3px 0 #d4d9e1);color:var(--search-local-hit-color,#444950);cursor:pointer;display:flex;flex-direction:row;height:var(--search-local-hit-height,56px);padding:0 var(--search-local-spacing,12px);width:100%}.hitTree_kk6K,.noResults_l6Q3{align-items:center;display:flex}html[data-theme=dark] .dropdownMenu_qbY6 .suggestion_fB_2{background:var(--search-local-hit-background,var(--ifm-color-emphasis-100));box-shadow:var(--search-local-hit-shadow,none);color:var(--search-local-hit-color,var(--ifm-font-color-base))}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2:not(:last-child){margin-bottom:4px}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2.cursor_eG29{background-color:var(--search-local-highlight-color,var(--ifm-color-primary))}.hitFooter_E9YW a,.hitIcon_a7Zy,.hitPath_ieM4,.hitTree_kk6K,.noResultsIcon_EBY5{color:var(--search-local-muted-color,#969faf)}html[data-theme=dark] .hitIcon_a7Zy,html[data-theme=dark] .hitPath_ieM4,html[data-theme=dark] .hitTree_kk6K,html[data-theme=dark] .noResultsIcon_EBY5{color:var(--search-local-muted-color,var(--ifm-color-secondary-darkest))}.hitTree_kk6K>svg{height:var(--search-local-hit-height,56px);opacity:.5;width:24px}.hitIcon_a7Zy,.hitTree_kk6K>svg{stroke-width:var(--search-local-icon-stroke-width,1.4)}.hitAction_NqkB,.hitIcon_a7Zy{height:20px;width:20px}.hitWrapper_sAK8{display:flex;flex:1 1 auto;flex-direction:column;font-weight:500;justify-content:center;margin:0 8px;overflow-x:hidden;width:80%}.hitWrapper_sAK8 mark{background:none;color:var(--search-local-highlight-color,var(--ifm-color-primary))}.hitTitle_vyVt{font-size:.9em}.hitPath_ieM4{font-size:.75em}.hitPath_ieM4,.hitTitle_vyVt{overflow-x:hidden;text-overflow:ellipsis;white-space:nowrap}.noResults_l6Q3{flex-direction:column;justify-content:center;padding:var(--search-local-spacing,12px) 0}.noResultsIcon_EBY5{margin-bottom:var(--search-local-spacing,12px)}.hitFooter_E9YW{font-size:.85em;margin-top:var(--search-local-spacing,12px);text-align:center}.cursor_eG29 .hideAction_vcyE>svg,.tocCollapsibleContent_vkbj a{display:block}.suggestion_fB_2.cursor_eG29,.suggestion_fB_2.cursor_eG29 .hitIcon_a7Zy,.suggestion_fB_2.cursor_eG29 .hitPath_ieM4,.suggestion_fB_2.cursor_eG29 .hitTree_kk6K,.suggestion_fB_2.cursor_eG29 
mark{color:var(--search-local-hit-active-color,var(--ifm-color-white))!important}.searchBarContainer_NW3z{margin-left:16px}.searchBarContainer_NW3z .searchBarLoadingRing_YnHq{display:none;left:10px;position:absolute;top:6px}.searchBarContainer_NW3z .searchClearButton_qk4g{background:none;border:none;line-height:1rem;padding:0;position:absolute;right:.8rem;top:50%;transform:translateY(-50%)}.navbar__search{position:relative}.searchIndexLoading_EJ1f .navbar__search-input{background-image:none}.searchHintContainer_Pkmr{align-items:center;display:flex;gap:4px;height:100%;justify-content:center;pointer-events:none;position:absolute;right:10px;top:0}.searchHint_iIMx{background-color:var(--ifm-navbar-search-input-background-color);border:1px solid var(--ifm-color-emphasis-500);box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-500);color:var(--ifm-navbar-search-input-placeholder-color)}.loadingRing_RJI3{display:inline-block;height:20px;opacity:var(--search-local-loading-icon-opacity,.5);position:relative;width:20px}.loadingRing_RJI3 div{animation:1.2s cubic-bezier(.5,0,.5,1) infinite a;border:2px solid var(--search-load-loading-icon-color,var(--ifm-navbar-search-input-color));border-color:var(--search-load-loading-icon-color,var(--ifm-navbar-search-input-color)) #0000 #0000 #0000;border-radius:50%;display:block;height:16px;margin:2px;position:absolute;width:16px}.loadingRing_RJI3 div:first-child{animation-delay:-.45s}.loadingRing_RJI3 div:nth-child(2){animation-delay:-.3s}.loadingRing_RJI3 div:nth-child(3){animation-delay:-.15s}@keyframes a{0%{transform:rotate(0)}to{transform:rotate(1turn)}}.navbarHideable_m1mJ{transition:transform var(--ifm-transition-fast) ease}.navbarHidden_jGov{transform:translate3d(0,calc(-100% - 2px),0)}.errorBoundaryError_a6uf{color:red;white-space:pre-wrap}.footerLogoLink_BH7S{opacity:.5;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.footerLogoLink_BH7S:hover,.hash-link:focus,:hover>.hash-link{opacity:1}.mainWrapper_z2l0{display:flex;flex:1 0 auto;flex-direction:column}.docusaurus-mt-lg{margin-top:3rem}#__docusaurus{display:flex;flex-direction:column;min-height:100%}.searchContextInput_mXoe,.searchQueryInput_CFBF{background:var(--ifm-background-color);border:var(--ifm-global-border-width) solid var(--ifm-color-content-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-font-color-base);font-size:var(--ifm-font-size-base);margin-bottom:1rem;padding:.5rem;width:100%}.searchResultItem_U687{border-bottom:1px solid #dfe3e8;padding:1rem 0}.searchResultItemPath_uIbk{color:var(--ifm-color-content-secondary);font-size:.8rem;margin:.5rem 0 0}.searchResultItemSummary_oZHr{font-style:italic;margin:.5rem 0 0}.backToTopButton_sjWU{background-color:var(--ifm-color-emphasis-200);border-radius:50%;bottom:1.3rem;box-shadow:var(--ifm-global-shadow-lw);height:3rem;opacity:0;position:fixed;right:1.3rem;transform:scale(0);transition:all var(--ifm-transition-fast) var(--ifm-transition-timing-default);visibility:hidden;width:3rem;z-index:calc(var(--ifm-z-index-fixed) - 1)}.backToTopButton_sjWU:after{background-color:var(--ifm-color-emphasis-1000);content:" ";display:inline-block;height:100%;-webkit-mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem no-repeat;mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem 
no-repeat;width:100%}.backToTopButtonShow_xfvO{opacity:1;transform:scale(1);visibility:visible}[data-theme=dark]:root{--docusaurus-collapse-button-bg:#ffffff0d;--docusaurus-collapse-button-bg-hover:#ffffff1a}.collapseSidebarButton_PEFL{display:none;margin:0}.docMainContainer_gTbr,.docPage__5DB{display:flex;width:100%}.docPage__5DB{flex:1 0}.docsWrapper_BCFX{display:flex;flex:1 0 auto}.sidebar_re4s{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 2rem)}.sidebarItemTitle_pO2u{font-size:var(--ifm-h3-font-size);font-weight:var(--ifm-font-weight-bold)}.container_mt6G,.sidebarItemList_Yudw{font-size:.9rem}.sidebarItem__DBe{margin-top:.7rem}.sidebarItemLink_mo7H{color:var(--ifm-font-color-base);display:block}.sidebarItemLinkActive_I1ZP{color:var(--ifm-color-primary)!important}.authorCol_Hf19{flex-grow:1!important;max-width:inherit!important}.imageOnlyAuthorRow_pa_O{display:flex;flex-flow:row wrap}.features_t9lD,.buttons_AeoN{align-items:center;display:flex}.imageOnlyAuthorCol_G86a{margin-left:.3rem;margin-right:.3rem}.features_t9lD{padding:2rem 0;width:100%}.heroBanner_qdFl,[data-theme=dark]{overflow:hidden;padding:4rem 0;text-align:center;position:relative}.featureSvg_GfXr{height:200px;width:200px}.heroBanner_qdFl{-webkit-text-fill-color:#120c0c;background-color:#fff}[data-theme=dark]{background-color:#1b1b1d}.buttonGroup__atx button,.codeBlockContainer_Ckt0{background:var(--prism-background-color);color:var(--prism-color)}.buttons_AeoN{justify-content:center}.codeBlockContainer_Ckt0{border-radius:var(--ifm-code-border-radius);box-shadow:var(--ifm-global-shadow-lw);margin-bottom:var(--ifm-leading)}.codeBlockContent_biex{border-radius:inherit;direction:ltr;position:relative}.codeBlockTitle_Ktv7{border-bottom:1px solid var(--ifm-color-emphasis-300);border-top-left-radius:inherit;border-top-right-radius:inherit;font-size:var(--ifm-code-font-size);font-weight:500;padding:.75rem var(--ifm-pre-padding)}.codeBlock_bY9V{--ifm-pre-background:var(--prism-background-color);margin:0;padding:0}.codeBlockTitle_Ktv7+.codeBlockContent_biex .codeBlock_bY9V{border-top-left-radius:0;border-top-right-radius:0}.codeBlockLines_e6Vv{float:left;font:inherit;min-width:100%;padding:var(--ifm-pre-padding)}.codeBlockLinesWithNumbering_o6Pm{display:table;padding:var(--ifm-pre-padding) 0}.buttonGroup__atx{column-gap:.2rem;display:flex;position:absolute;right:calc(var(--ifm-pre-padding)/2);top:calc(var(--ifm-pre-padding)/2)}.buttonGroup__atx button{align-items:center;border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-global-radius);display:flex;line-height:0;opacity:0;padding:.4rem;transition:opacity var(--ifm-transition-fast) ease-in-out}.buttonGroup__atx button:focus-visible,.buttonGroup__atx button:hover{opacity:1!important}.theme-code-block:hover .buttonGroup__atx button{opacity:.4}.iconEdit_Z9Sw{margin-right:.3em;vertical-align:sub}:where(:root){--docusaurus-highlighted-code-line-bg:#484d5b}:where([data-theme=dark]){--docusaurus-highlighted-code-line-bg:#646464}.theme-code-block-highlighted-line{background-color:var(--docusaurus-highlighted-code-line-bg);display:block;margin:0 calc(var(--ifm-pre-padding)*-1);padding:0 var(--ifm-pre-padding)}.codeLine_lJS_{counter-increment:a;display:table-row}.codeLineNumber_Tfdd{background:var(--ifm-pre-background);display:table-cell;left:0;overflow-wrap:normal;padding:0 
var(--ifm-pre-padding);position:sticky;text-align:right;width:1%}.codeLineNumber_Tfdd:before{content:counter(a);opacity:.4}.codeLineContent_feaV{padding-right:var(--ifm-pre-padding)}.tag_zVej{border:1px solid var(--docusaurus-tag-list-border);transition:border var(--ifm-transition-fast)}.tag_zVej:hover{--docusaurus-tag-list-border:var(--ifm-link-color);text-decoration:none}.tagRegular_sFm0{border-radius:var(--ifm-global-radius);font-size:90%;padding:.2rem .5rem .3rem}.tagWithCount_h2kH{align-items:center;border-left:0;display:flex;padding:0 .5rem 0 1rem;position:relative}.tagWithCount_h2kH:after,.tagWithCount_h2kH:before{border:1px solid var(--docusaurus-tag-list-border);content:"";position:absolute;top:50%;transition:inherit}.tagWithCount_h2kH:before{border-bottom:0;border-right:0;height:1.18rem;right:100%;transform:translate(50%,-50%) rotate(-45deg);width:1.18rem}.tagWithCount_h2kH:after{border-radius:50%;height:.5rem;left:0;transform:translateY(-50%);width:.5rem}.tagWithCount_h2kH span{background:var(--ifm-color-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-color-black);font-size:.7rem;line-height:1.2;margin-left:.3rem;padding:.1rem .4rem}.theme-code-block:hover .copyButtonCopied_obH4{opacity:1!important}.copyButtonIcons_eSgA{height:1.125rem;position:relative;width:1.125rem}.copyButtonIcon_y97N,.copyButtonSuccessIcon_LjdS{fill:currentColor;height:inherit;left:0;opacity:inherit;position:absolute;top:0;transition:all var(--ifm-transition-fast) ease;width:inherit}.copyButtonSuccessIcon_LjdS{color:#00d600;left:50%;opacity:0;top:50%;transform:translate(-50%,-50%) scale(.33)}.copyButtonCopied_obH4 .copyButtonIcon_y97N{opacity:0;transform:scale(.33)}.copyButtonCopied_obH4 .copyButtonSuccessIcon_LjdS{opacity:1;transform:translate(-50%,-50%) scale(1);transition-delay:75ms}.tags_jXut{display:inline}.tag_QGVx{display:inline-block;margin:0 .4rem .5rem 0}.lastUpdated_vwxv{font-size:smaller;font-style:italic;margin-top:.2rem}.tocCollapsibleButton_TO0P{align-items:center;display:flex;font-size:inherit;justify-content:space-between;padding:.4rem .8rem;width:100%}.tocCollapsibleButton_TO0P:after{background:var(--ifm-menu-link-sublist-icon) 50% 50%/2rem 2rem no-repeat;content:"";filter:var(--ifm-menu-link-sublist-icon-filter);height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast);width:1.25rem}.tocCollapsibleButtonExpanded_MG3E:after,.tocCollapsibleExpanded_sAul{transform:none}.tocCollapsible_ETCw{background-color:var(--ifm-menu-color-background-active);border-radius:var(--ifm-global-radius);margin:1rem 0}.tocCollapsibleContent_vkbj>ul{border-left:none;border-top:1px solid var(--ifm-color-emphasis-300);font-size:15px;padding:.2rem 0}.tocCollapsibleContent_vkbj ul li{margin:.4rem .8rem}.wordWrapButtonIcon_Bwma{height:1.2rem;width:1.2rem}.details_lb9f{--docusaurus-details-summary-arrow-size:0.38rem;--docusaurus-details-transition:transform 200ms ease;--docusaurus-details-decoration-color:grey}.details_lb9f>summary{cursor:pointer;padding-left:1rem;position:relative}.details_lb9f>summary::-webkit-details-marker{display:none}.details_lb9f>summary:before{border-color:#0000 #0000 #0000 var(--docusaurus-details-decoration-color);border-style:solid;border-width:var(--docusaurus-details-summary-arrow-size);content:"";left:0;position:absolute;top:.45rem;transform:rotate(0);transform-origin:calc(var(--docusaurus-details-summary-arrow-size)/2) 50%;transition:var(--docusaurus-details-transition)}.collapsibleContent_i85q{border-top:1px solid 
var(--docusaurus-details-decoration-color);margin-top:1rem;padding-top:1rem}.details_b_Ee{--docusaurus-details-decoration-color:var(--ifm-alert-border-color);--docusaurus-details-transition:transform var(--ifm-transition-fast) ease;border:1px solid var(--ifm-alert-border-color);margin:0 0 var(--ifm-spacing-vertical)}.anchorWithStickyNavbar_LWe7{scroll-margin-top:calc(var(--ifm-navbar-height) + .5rem)}.anchorWithHideOnScrollNavbar_WYt5{scroll-margin-top:.5rem}.hash-link{opacity:0;padding-left:.5rem;transition:opacity var(--ifm-transition-fast);-webkit-user-select:none;user-select:none}.hash-link:before{content:"#"}.img_ev3q{height:auto}.admonition_LlT9{margin-bottom:1em}.admonitionHeading_tbUL{font:var(--ifm-heading-font-weight) var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.3rem}.admonitionHeading_tbUL code{text-transform:none}.admonitionIcon_kALy{display:inline-block;margin-right:.4em;vertical-align:middle}.admonitionIcon_kALy svg{fill:var(--ifm-alert-foreground-color);display:inline-block;height:1.6em;width:1.6em}.blogPostFooterDetailsFull_mRVl{flex-direction:column}.tableOfContents_bqdL{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 1rem)}.breadcrumbHomeIcon_YNFT{height:1.1rem;position:relative;top:1px;vertical-align:top;width:1.1rem}.breadcrumbsContainer_Z_bl{--ifm-breadcrumb-size-multiplier:0.8;margin-bottom:.8rem}.mdxPageWrapper_j9I6{justify-content:center}@media (min-width:997px){.collapseSidebarButton_PEFL,.expandButton_m80_{background-color:var(--docusaurus-collapse-button-bg)}:root{--docusaurus-announcement-bar-height:30px}.announcementBarClose_gvF7,.announcementBarPlaceholder_vyr4{flex-basis:50px}.searchBox_ZlJk{padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}.collapseSidebarButton_PEFL{border:1px solid var(--ifm-toc-border-color);border-radius:0;bottom:0;display:block!important;height:40px;position:sticky}.collapseSidebarButtonIcon_kv0_{margin-top:4px;transform:rotate(180deg)}.expandButtonIcon_BlDH,[dir=rtl] .collapseSidebarButtonIcon_kv0_{transform:rotate(0)}.collapseSidebarButton_PEFL:focus,.collapseSidebarButton_PEFL:hover,.expandButton_m80_:focus,.expandButton_m80_:hover{background-color:var(--docusaurus-collapse-button-bg-hover)}.menuHtmlItem_M9Kj{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu_SIkG{flex-grow:1;padding:.5rem}@supports (scrollbar-gutter:stable){.menu_SIkG{padding:.5rem 0 .5rem .5rem;scrollbar-gutter:stable}}.menuWithAnnouncementBar_GW3s{margin-bottom:var(--docusaurus-announcement-bar-height)}.sidebar_njMd{display:flex;flex-direction:column;height:100%;padding-top:var(--ifm-navbar-height);width:var(--doc-sidebar-width)}.sidebarWithHideableNavbar_wUlq{padding-top:0}.sidebarHidden_VK0M{opacity:0;visibility:hidden}.sidebarLogo_isFc{align-items:center;color:inherit!important;display:flex!important;margin:0 var(--ifm-navbar-padding-horizontal);max-height:var(--ifm-navbar-height);min-height:var(--ifm-navbar-height);text-decoration:none!important}.sidebarLogo_isFc img{height:2rem;margin-right:.5rem}.expandButton_m80_{align-items:center;display:flex;height:100%;justify-content:center;position:absolute;right:0;top:0;transition:background-color var(--ifm-transition-fast) ease;width:100%}[dir=rtl] .expandButtonIcon_BlDH{transform:rotate(180deg)}.docSidebarContainer_b6E3{border-right:1px solid 
var(--ifm-toc-border-color);-webkit-clip-path:inset(0);clip-path:inset(0);display:block;margin-top:calc(var(--ifm-navbar-height)*-1);transition:width var(--ifm-transition-fast) ease;width:var(--doc-sidebar-width);will-change:width}.docSidebarContainerHidden_b3ry{cursor:pointer;width:var(--doc-sidebar-hidden-width)}.sidebarViewport_Xe31{height:100%;max-height:100vh;position:sticky;top:0}.docMainContainer_gTbr{flex-grow:1;max-width:calc(100% - var(--doc-sidebar-width))}.docMainContainerEnhanced_Uz_u{max-width:calc(100% - var(--doc-sidebar-hidden-width))}.docItemWrapperEnhanced_czyv{max-width:calc(var(--ifm-container-width) + var(--doc-sidebar-width))!important}.lastUpdated_vwxv{text-align:right}.tocMobile_ITEo{display:none}.docItemCol_VOVn{max-width:75%!important}}@media (min-width:1440px){.container{max-width:var(--ifm-container-width-xl)}}@media (max-width:996px){.col{--ifm-col-width:100%;flex-basis:var(--ifm-col-width);margin-left:0}.footer{--ifm-footer-padding-horizontal:0}.colorModeToggle_DEke,.footer__link-separator,.navbar__item,.sidebar_re4s,.tableOfContents_bqdL{display:none}.footer__col{margin-bottom:calc(var(--ifm-spacing-vertical)*3)}.footer__link-item{display:block}.hero{padding-left:0;padding-right:0}.navbar>.container,.navbar>.container-fluid{padding:0}.navbar__toggle{display:inherit}.navbar__search-input{width:9rem}.pills--block,.tabs--block{flex-direction:column}.searchBox_ZlJk{position:absolute;right:var(--ifm-navbar-padding-horizontal)}.docItemContainer_F8PC{padding:0 .3rem}}@media not (max-width:996px){.searchBar_RVTs.searchBarLeft_MXDe .dropdownMenu_qbY6{left:0!important;right:auto!important}}@media only screen and (max-width:996px){.searchQueryColumn_q7nx{max-width:60%!important}.searchContextColumn_oWAF{max-width:40%!important}}@media screen and (max-width:996px){.heroBanner_qdFl{padding:2rem}}@media (max-width:576px){.markdown h1:first-child{--ifm-h1-font-size:2rem}.markdown>h2{--ifm-h2-font-size:1.5rem}.markdown>h3{--ifm-h3-font-size:1.25rem}.navbar__search-input:not(:focus){width:2rem}.searchBar_RVTs .dropdownMenu_qbY6{max-width:calc(100vw - var(--ifm-navbar-padding-horizontal)*2);width:var(--search-local-modal-width-sm,340px)}.searchBarContainer_NW3z:not(.focused_OWtg) .searchClearButton_qk4g,.searchHintContainer_Pkmr{display:none}.title_f1Hy{font-size:2rem}}@media screen and (max-width:576px){.searchQueryColumn_q7nx{max-width:100%!important}.searchContextColumn_oWAF{max-width:100%!important;padding-left:var(--ifm-spacing-horizontal)!important}}@media (hover:hover){.backToTopButton_sjWU:hover{background-color:var(--ifm-color-emphasis-300)}}@media (pointer:fine){.thin-scrollbar{scrollbar-width:thin}.thin-scrollbar::-webkit-scrollbar{height:var(--ifm-scrollbar-size);width:var(--ifm-scrollbar-size)}.thin-scrollbar::-webkit-scrollbar-track{background:var(--ifm-scrollbar-track-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb{background:var(--ifm-scrollbar-thumb-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb:hover{background:var(--ifm-scrollbar-thumb-hover-background-color)}}@media (prefers-reduced-motion:reduce){:root{--ifm-transition-fast:0ms;--ifm-transition-slow:0ms}}@media print{.announcementBar_mb4j,.footer,.menu,.navbar,.pagination-nav,.table-of-contents,.tocMobile_ITEo{display:none}.tabs{page-break-inside:avoid}.codeBlockLines_e6Vv{white-space:pre-wrap}} \ No newline at end of file diff --git a/assets/css/styles.eb1ce4d6.css b/assets/css/styles.eb1ce4d6.css deleted file mode 100644 index 
0debedc0e..000000000 --- a/assets/css/styles.eb1ce4d6.css +++ /dev/null @@ -1 +0,0 @@ -.col,.container{padding:0 var(--ifm-spacing-horizontal);width:100%}.markdown>h2,.markdown>h3,.markdown>h4,.markdown>h5,.markdown>h6{margin-bottom:calc(var(--ifm-heading-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown li,body{word-wrap:break-word}body,ol ol,ol ul,ul ol,ul ul{margin:0}pre,table{overflow:auto}blockquote,pre{margin:0 0 var(--ifm-spacing-vertical)}.breadcrumbs__link,.button{transition-timing-function:var(--ifm-transition-timing-default)}.button,code{vertical-align:middle}.button--outline.button--active,.button--outline:active,.button--outline:hover,:root{--ifm-button-color:var(--ifm-font-color-base-inverse)}.menu__link:hover,a{transition:color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.navbar--dark,:root{--ifm-navbar-link-hover-color:var(--ifm-color-primary)}.menu,.navbar-sidebar{overflow-x:hidden}:root,html[data-theme=dark]{--ifm-color-emphasis-500:var(--ifm-color-gray-500)}.toggleButton_gllP,html{-webkit-tap-highlight-color:transparent}*,.loadingRing_RJI3 div{box-sizing:border-box}.clean-list,.containsTaskList_mC6p,.details_lb9f>summary,.dropdown__menu,.menu__list{list-style:none}:root{--ifm-color-scheme:light;--ifm-dark-value:10%;--ifm-darker-value:15%;--ifm-darkest-value:30%;--ifm-light-value:15%;--ifm-lighter-value:30%;--ifm-lightest-value:50%;--ifm-contrast-background-value:90%;--ifm-contrast-foreground-value:70%;--ifm-contrast-background-dark-value:70%;--ifm-contrast-foreground-dark-value:90%;--ifm-color-primary:#3578e5;--ifm-color-secondary:#ebedf0;--ifm-color-success:#00a400;--ifm-color-info:#54c7ec;--ifm-color-warning:#ffba00;--ifm-color-danger:#fa383e;--ifm-color-primary-dark:#306cce;--ifm-color-primary-darker:#2d66c3;--ifm-color-primary-darkest:#2554a0;--ifm-color-primary-light:#538ce9;--ifm-color-primary-lighter:#72a1ed;--ifm-color-primary-lightest:#9abcf2;--ifm-color-primary-contrast-background:#ebf2fc;--ifm-color-primary-contrast-foreground:#102445;--ifm-color-secondary-dark:#d4d5d8;--ifm-color-secondary-darker:#c8c9cc;--ifm-color-secondary-darkest:#a4a6a8;--ifm-color-secondary-light:#eef0f2;--ifm-color-secondary-lighter:#f1f2f5;--ifm-color-secondary-lightest:#f5f6f8;--ifm-color-secondary-contrast-background:#fdfdfe;--ifm-color-secondary-contrast-foreground:#474748;--ifm-color-success-dark:#009400;--ifm-color-success-darker:#008b00;--ifm-color-success-darkest:#007300;--ifm-color-success-light:#26b226;--ifm-color-success-lighter:#4dbf4d;--ifm-color-success-lightest:#80d280;--ifm-color-success-contrast-background:#e6f6e6;--ifm-color-success-contrast-foreground:#003100;--ifm-color-info-dark:#4cb3d4;--ifm-color-info-darker:#47a9c9;--ifm-color-info-darkest:#3b8ba5;--ifm-color-info-light:#6ecfef;--ifm-color-info-lighter:#87d8f2;--ifm-color-info-lightest:#aae3f6;--ifm-color-info-contrast-background:#eef9fd;--ifm-color-info-contrast-foreground:#193c47;--ifm-color-warning-dark:#e6a700;--ifm-color-warning-darker:#d99e00;--ifm-color-warning-darkest:#b38200;--ifm-color-warning-light:#ffc426;--ifm-color-warning-lighter:#ffcf4d;--ifm-color-warning-lightest:#ffdd80;--ifm-color-warning-contrast-background:#fff8e6;--ifm-color-warning-contrast-foreground:#4d3800;--ifm-color-danger-dark:#e13238;--ifm-color-danger-darker:#d53035;--ifm-color-danger-darkest:#af272b;--ifm-color-danger-light:#fb565b;--ifm-color-danger-lighter:#fb7478;--ifm-color-danger-lightest:#fd9c9f;--ifm-color-danger-contrast-background:#ffebec;--ifm-color-danger-contrast-foreground:#4b1113;--ifm-color-w
hite:#fff;--ifm-color-black:#000;--ifm-color-gray-0:var(--ifm-color-white);--ifm-color-gray-100:#f5f6f7;--ifm-color-gray-200:#ebedf0;--ifm-color-gray-300:#dadde1;--ifm-color-gray-400:#ccd0d5;--ifm-color-gray-500:#bec3c9;--ifm-color-gray-600:#8d949e;--ifm-color-gray-700:#606770;--ifm-color-gray-800:#444950;--ifm-color-gray-900:#1c1e21;--ifm-color-gray-1000:var(--ifm-color-black);--ifm-color-emphasis-0:var(--ifm-color-gray-0);--ifm-color-emphasis-100:var(--ifm-color-gray-100);--ifm-color-emphasis-200:var(--ifm-color-gray-200);--ifm-color-emphasis-300:var(--ifm-color-gray-300);--ifm-color-emphasis-400:var(--ifm-color-gray-400);--ifm-color-emphasis-600:var(--ifm-color-gray-600);--ifm-color-emphasis-700:var(--ifm-color-gray-700);--ifm-color-emphasis-800:var(--ifm-color-gray-800);--ifm-color-emphasis-900:var(--ifm-color-gray-900);--ifm-color-emphasis-1000:var(--ifm-color-gray-1000);--ifm-color-content:var(--ifm-color-emphasis-900);--ifm-color-content-inverse:var(--ifm-color-emphasis-0);--ifm-color-content-secondary:#525860;--ifm-background-color:#0000;--ifm-background-surface-color:var(--ifm-color-content-inverse);--ifm-global-border-width:1px;--ifm-global-radius:0.4rem;--ifm-hover-overlay:#0000000d;--ifm-font-color-base:var(--ifm-color-content);--ifm-font-color-base-inverse:var(--ifm-color-content-inverse);--ifm-font-color-secondary:var(--ifm-color-content-secondary);--ifm-font-family-base:system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif,BlinkMacSystemFont,"Segoe UI",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol";--ifm-font-family-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;--ifm-font-size-base:100%;--ifm-font-weight-light:300;--ifm-font-weight-normal:400;--ifm-font-weight-semibold:500;--ifm-font-weight-bold:700;--ifm-font-weight-base:var(--ifm-font-weight-normal);--ifm-line-height-base:1.65;--ifm-global-spacing:1rem;--ifm-spacing-vertical:var(--ifm-global-spacing);--ifm-spacing-horizontal:var(--ifm-global-spacing);--ifm-transition-fast:200ms;--ifm-transition-slow:400ms;--ifm-transition-timing-default:cubic-bezier(0.08,0.52,0.52,1);--ifm-global-shadow-lw:0 1px 2px 0 #0000001a;--ifm-global-shadow-md:0 5px 40px #0003;--ifm-global-shadow-tl:0 12px 28px 0 #0003,0 2px 4px 0 
#0000001a;--ifm-z-index-dropdown:100;--ifm-z-index-fixed:200;--ifm-z-index-overlay:400;--ifm-container-width:1140px;--ifm-container-width-xl:1320px;--ifm-code-background:#f6f7f8;--ifm-code-border-radius:var(--ifm-global-radius);--ifm-code-font-size:90%;--ifm-code-padding-horizontal:0.1rem;--ifm-code-padding-vertical:0.1rem;--ifm-pre-background:var(--ifm-code-background);--ifm-pre-border-radius:var(--ifm-code-border-radius);--ifm-pre-color:inherit;--ifm-pre-line-height:1.45;--ifm-pre-padding:1rem;--ifm-heading-color:inherit;--ifm-heading-margin-top:0;--ifm-heading-margin-bottom:var(--ifm-spacing-vertical);--ifm-heading-font-family:var(--ifm-font-family-base);--ifm-heading-font-weight:var(--ifm-font-weight-bold);--ifm-heading-line-height:1.25;--ifm-h1-font-size:2rem;--ifm-h2-font-size:1.5rem;--ifm-h3-font-size:1.25rem;--ifm-h4-font-size:1rem;--ifm-h5-font-size:0.875rem;--ifm-h6-font-size:0.85rem;--ifm-image-alignment-padding:1.25rem;--ifm-leading-desktop:1.25;--ifm-leading:calc(var(--ifm-leading-desktop)*1rem);--ifm-list-left-padding:2rem;--ifm-list-margin:1rem;--ifm-list-item-margin:0.25rem;--ifm-list-paragraph-margin:1rem;--ifm-table-cell-padding:0.75rem;--ifm-table-background:#0000;--ifm-table-stripe-background:#00000008;--ifm-table-border-width:1px;--ifm-table-border-color:var(--ifm-color-emphasis-300);--ifm-table-head-background:inherit;--ifm-table-head-color:inherit;--ifm-table-head-font-weight:var(--ifm-font-weight-bold);--ifm-table-cell-color:inherit;--ifm-link-color:var(--ifm-color-primary);--ifm-link-decoration:none;--ifm-link-hover-color:var(--ifm-link-color);--ifm-link-hover-decoration:underline;--ifm-paragraph-margin-bottom:var(--ifm-leading);--ifm-blockquote-font-size:var(--ifm-font-size-base);--ifm-blockquote-border-left-width:2px;--ifm-blockquote-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-blockquote-padding-vertical:0;--ifm-blockquote-shadow:none;--ifm-blockquote-color:var(--ifm-color-emphasis-800);--ifm-blockquote-border-color:var(--ifm-color-emphasis-300);--ifm-hr-background-color:var(--ifm-color-emphasis-500);--ifm-hr-height:1px;--ifm-hr-margin-vertical:1.5rem;--ifm-scrollbar-size:7px;--ifm-scrollbar-track-background-color:#f1f1f1;--ifm-scrollbar-thumb-background-color:silver;--ifm-scrollbar-thumb-hover-background-color:#a7a7a7;--ifm-alert-background-color:inherit;--ifm-alert-border-color:inherit;--ifm-alert-border-radius:var(--ifm-global-radius);--ifm-alert-border-width:0px;--ifm-alert-border-left-width:5px;--ifm-alert-color:var(--ifm-font-color-base);--ifm-alert-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-alert-padding-vertical:var(--ifm-spacing-vertical);--ifm-alert-shadow:var(--ifm-global-shadow-lw);--ifm-avatar-intro-margin:1rem;--ifm-avatar-intro-alignment:inherit;--ifm-avatar-photo-size:3rem;--ifm-badge-background-color:inherit;--ifm-badge-border-color:inherit;--ifm-badge-border-radius:var(--ifm-global-radius);--ifm-badge-border-width:var(--ifm-global-border-width);--ifm-badge-color:var(--ifm-color-white);--ifm-badge-padding-horizontal:calc(var(--ifm-spacing-horizontal)*0.5);--ifm-badge-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-breadcrumb-border-radius:1.5rem;--ifm-breadcrumb-spacing:0.5rem;--ifm-breadcrumb-color-active:var(--ifm-color-primary);--ifm-breadcrumb-item-background-active:var(--ifm-hover-overlay);--ifm-breadcrumb-padding-horizontal:0.8rem;--ifm-breadcrumb-padding-vertical:0.4rem;--ifm-breadcrumb-size-multiplier:1;--ifm-breadcrumb-separator:url('data:image/svg+xml;utf8,');--ifm-breadcrumb-separator-filter:none;--i
fm-breadcrumb-separator-size:0.5rem;--ifm-breadcrumb-separator-size-multiplier:1.25;--ifm-button-background-color:inherit;--ifm-button-border-color:var(--ifm-button-background-color);--ifm-button-border-width:var(--ifm-global-border-width);--ifm-button-font-weight:var(--ifm-font-weight-bold);--ifm-button-padding-horizontal:1.5rem;--ifm-button-padding-vertical:0.375rem;--ifm-button-size-multiplier:1;--ifm-button-transition-duration:var(--ifm-transition-fast);--ifm-button-border-radius:calc(var(--ifm-global-radius)*var(--ifm-button-size-multiplier));--ifm-button-group-spacing:2px;--ifm-card-background-color:var(--ifm-background-surface-color);--ifm-card-border-radius:calc(var(--ifm-global-radius)*2);--ifm-card-horizontal-spacing:var(--ifm-global-spacing);--ifm-card-vertical-spacing:var(--ifm-global-spacing);--ifm-toc-border-color:var(--ifm-color-emphasis-300);--ifm-toc-link-color:var(--ifm-color-content-secondary);--ifm-toc-padding-vertical:0.5rem;--ifm-toc-padding-horizontal:0.5rem;--ifm-dropdown-background-color:var(--ifm-background-surface-color);--ifm-dropdown-font-weight:var(--ifm-font-weight-semibold);--ifm-dropdown-link-color:var(--ifm-font-color-base);--ifm-dropdown-hover-background-color:var(--ifm-hover-overlay);--ifm-footer-background-color:var(--ifm-color-emphasis-100);--ifm-footer-color:inherit;--ifm-footer-link-color:var(--ifm-color-emphasis-700);--ifm-footer-link-hover-color:var(--ifm-color-primary);--ifm-footer-link-horizontal-spacing:0.5rem;--ifm-footer-padding-horizontal:calc(var(--ifm-spacing-horizontal)*2);--ifm-footer-padding-vertical:calc(var(--ifm-spacing-vertical)*2);--ifm-footer-title-color:inherit;--ifm-footer-logo-max-width:min(30rem,90vw);--ifm-hero-background-color:var(--ifm-background-surface-color);--ifm-hero-text-color:var(--ifm-color-emphasis-800);--ifm-menu-color:var(--ifm-color-emphasis-700);--ifm-menu-color-active:var(--ifm-color-primary);--ifm-menu-color-background-active:var(--ifm-hover-overlay);--ifm-menu-color-background-hover:var(--ifm-hover-overlay);--ifm-menu-link-padding-horizontal:0.75rem;--ifm-menu-link-padding-vertical:0.375rem;--ifm-menu-link-sublist-icon:url('data:image/svg+xml;utf8,');--ifm-menu-link-sublist-icon-filter:none;--ifm-navbar-background-color:var(--ifm-background-surface-color);--ifm-navbar-height:3.75rem;--ifm-navbar-item-padding-horizontal:0.75rem;--ifm-navbar-item-padding-vertical:0.25rem;--ifm-navbar-link-color:var(--ifm-font-color-base);--ifm-navbar-link-active-color:var(--ifm-link-color);--ifm-navbar-padding-horizontal:var(--ifm-spacing-horizontal);--ifm-navbar-padding-vertical:calc(var(--ifm-spacing-vertical)*0.5);--ifm-navbar-shadow:var(--ifm-global-shadow-lw);--ifm-navbar-search-input-background-color:var(--ifm-color-emphasis-200);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-800);--ifm-navbar-search-input-placeholder-color:var(--ifm-color-emphasis-500);--ifm-navbar-search-input-icon:url('data:image/svg+xml;utf8,');--ifm-navbar-sidebar-width:83vw;--ifm-pagination-border-radius:var(--ifm-global-radius);--ifm-pagination-color-active:var(--ifm-color-primary);--ifm-pagination-font-size:1rem;--ifm-pagination-item-active-background:var(--ifm-hover-overlay);--ifm-pagination-page-spacing:0.2em;--ifm-pagination-padding-horizontal:calc(var(--ifm-spacing-horizontal)*1);--ifm-pagination-padding-vertical:calc(var(--ifm-spacing-vertical)*0.25);--ifm-pagination-nav-border-radius:var(--ifm-global-radius);--ifm-pagination-nav-color-hover:var(--ifm-color-primary);--ifm-pills-color-active:var(--ifm-color-primary);--ifm-pills-color-
background-active:var(--ifm-hover-overlay);--ifm-pills-spacing:0.125rem;--ifm-tabs-color:var(--ifm-font-color-secondary);--ifm-tabs-color-active:var(--ifm-color-primary);--ifm-tabs-color-active-border:var(--ifm-tabs-color-active);--ifm-tabs-padding-horizontal:1rem;--ifm-tabs-padding-vertical:1rem;--docusaurus-progress-bar-color:var(--ifm-color-primary);--docusaurus-announcement-bar-height:auto;--docusaurus-collapse-button-bg:#0000;--docusaurus-collapse-button-bg-hover:#0000001a;--doc-sidebar-width:300px;--doc-sidebar-hidden-width:30px;--docusaurus-tag-list-border:var(--ifm-color-emphasis-300)}.badge--danger,.badge--info,.badge--primary,.badge--secondary,.badge--success,.badge--warning{--ifm-badge-border-color:var(--ifm-badge-background-color)}.button--link,.button--outline{--ifm-button-background-color:#0000}html{-webkit-font-smoothing:antialiased;-webkit-text-size-adjust:100%;text-size-adjust:100%;background-color:var(--ifm-background-color);color:var(--ifm-font-color-base);color-scheme:var(--ifm-color-scheme);font:var(--ifm-font-size-base)/var(--ifm-line-height-base) var(--ifm-font-family-base);text-rendering:optimizelegibility}iframe{border:0;color-scheme:auto}.container{margin:0 auto;max-width:var(--ifm-container-width)}.container--fluid{max-width:inherit}.row{display:flex;flex-wrap:wrap;margin:0 calc(var(--ifm-spacing-horizontal)*-1)}.margin-bottom--none,.margin-vert--none,.markdown>:last-child{margin-bottom:0!important}.margin-top--none,.margin-vert--none{margin-top:0!important}.row--no-gutters{margin-left:0;margin-right:0}.margin-horiz--none,.margin-right--none{margin-right:0!important}.row--no-gutters>.col{padding-left:0;padding-right:0}.row--align-top{align-items:flex-start}.row--align-bottom{align-items:flex-end}.menuExternalLink_NmtK,.row--align-center{align-items:center}.row--align-stretch{align-items:stretch}.row--align-baseline{align-items:baseline}.col{--ifm-col-width:100%;flex:1 0;margin-left:0;max-width:var(--ifm-col-width)}.padding-bottom--none,.padding-vert--none{padding-bottom:0!important}.padding-top--none,.padding-vert--none{padding-top:0!important}.padding-horiz--none,.padding-left--none{padding-left:0!important}.padding-horiz--none,.padding-right--none{padding-right:0!important}.col[class*=col--]{flex:0 0 
var(--ifm-col-width)}.col--1{--ifm-col-width:8.33333%}.col--offset-1{margin-left:8.33333%}.col--2{--ifm-col-width:16.66667%}.col--offset-2{margin-left:16.66667%}.col--3{--ifm-col-width:25%}.col--offset-3{margin-left:25%}.col--4{--ifm-col-width:33.33333%}.col--offset-4{margin-left:33.33333%}.col--5{--ifm-col-width:41.66667%}.col--offset-5{margin-left:41.66667%}.col--6{--ifm-col-width:50%}.col--offset-6{margin-left:50%}.col--7{--ifm-col-width:58.33333%}.col--offset-7{margin-left:58.33333%}.col--8{--ifm-col-width:66.66667%}.col--offset-8{margin-left:66.66667%}.col--9{--ifm-col-width:75%}.col--offset-9{margin-left:75%}.col--10{--ifm-col-width:83.33333%}.col--offset-10{margin-left:83.33333%}.col--11{--ifm-col-width:91.66667%}.col--offset-11{margin-left:91.66667%}.col--12{--ifm-col-width:100%}.col--offset-12{margin-left:100%}.margin-horiz--none,.margin-left--none{margin-left:0!important}.margin--none{margin:0!important}.margin-bottom--xs,.margin-vert--xs{margin-bottom:.25rem!important}.margin-top--xs,.margin-vert--xs{margin-top:.25rem!important}.margin-horiz--xs,.margin-left--xs{margin-left:.25rem!important}.margin-horiz--xs,.margin-right--xs{margin-right:.25rem!important}.margin--xs{margin:.25rem!important}.margin-bottom--sm,.margin-vert--sm{margin-bottom:.5rem!important}.margin-top--sm,.margin-vert--sm{margin-top:.5rem!important}.margin-horiz--sm,.margin-left--sm{margin-left:.5rem!important}.margin-horiz--sm,.margin-right--sm{margin-right:.5rem!important}.margin--sm{margin:.5rem!important}.margin-bottom--md,.margin-vert--md{margin-bottom:1rem!important}.margin-top--md,.margin-vert--md{margin-top:1rem!important}.margin-horiz--md,.margin-left--md{margin-left:1rem!important}.margin-horiz--md,.margin-right--md{margin-right:1rem!important}.margin--md{margin:1rem!important}.margin-bottom--lg,.margin-vert--lg{margin-bottom:2rem!important}.margin-top--lg,.margin-vert--lg{margin-top:2rem!important}.margin-horiz--lg,.margin-left--lg{margin-left:2rem!important}.margin-horiz--lg,.margin-right--lg{margin-right:2rem!important}.margin--lg{margin:2rem!important}.margin-bottom--xl,.margin-vert--xl{margin-bottom:5rem!important}.margin-top--xl,.margin-vert--xl{margin-top:5rem!important}.margin-horiz--xl,.margin-left--xl{margin-left:5rem!important}.margin-horiz--xl,.margin-right--xl{margin-right:5rem!important}.margin--xl{margin:5rem!important}.padding--none{padding:0!important}.padding-bottom--xs,.padding-vert--xs{padding-bottom:.25rem!important}.padding-top--xs,.padding-vert--xs{padding-top:.25rem!important}.padding-horiz--xs,.padding-left--xs{padding-left:.25rem!important}.padding-horiz--xs,.padding-right--xs{padding-right:.25rem!important}.padding--xs{padding:.25rem!important}.padding-bottom--sm,.padding-vert--sm{padding-bottom:.5rem!important}.padding-top--sm,.padding-vert--sm{padding-top:.5rem!important}.padding-horiz--sm,.padding-left--sm{padding-left:.5rem!important}.padding-horiz--sm,.padding-right--sm{padding-right:.5rem!important}.padding--sm{padding:.5rem!important}.padding-bottom--md,.padding-vert--md{padding-bottom:1rem!important}.padding-top--md,.padding-vert--md{padding-top:1rem!important}.padding-horiz--md,.padding-left--md{padding-left:1rem!important}.padding-horiz--md,.padding-right--md{padding-right:1rem!important}.padding--md{padding:1rem!important}.padding-bottom--lg,.padding-vert--lg{padding-bottom:2rem!important}.padding-top--lg,.padding-vert--lg{padding-top:2rem!important}.padding-horiz--lg,.padding-left--lg{padding-left:2rem!important}.padding-horiz--lg,.padding-right--lg{padding-right:2r
em!important}.padding--lg{padding:2rem!important}.padding-bottom--xl,.padding-vert--xl{padding-bottom:5rem!important}.padding-top--xl,.padding-vert--xl{padding-top:5rem!important}.padding-horiz--xl,.padding-left--xl{padding-left:5rem!important}.padding-horiz--xl,.padding-right--xl{padding-right:5rem!important}.padding--xl{padding:5rem!important}code{background-color:var(--ifm-code-background);border:.1rem solid #0000001a;border-radius:var(--ifm-code-border-radius);font-family:var(--ifm-font-family-monospace);font-size:var(--ifm-code-font-size);padding:var(--ifm-code-padding-vertical) var(--ifm-code-padding-horizontal)}a code{color:inherit}pre{background-color:var(--ifm-pre-background);border-radius:var(--ifm-pre-border-radius);color:var(--ifm-pre-color);font:var(--ifm-code-font-size)/var(--ifm-pre-line-height) var(--ifm-font-family-monospace);padding:var(--ifm-pre-padding)}pre code{background-color:initial;border:none;font-size:100%;line-height:inherit;padding:0}kbd{background-color:var(--ifm-color-emphasis-0);border:1px solid var(--ifm-color-emphasis-400);border-radius:.2rem;box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-400);color:var(--ifm-color-emphasis-800);font:80% var(--ifm-font-family-monospace);padding:.15rem .3rem}h1,h2,h3,h4,h5,h6{color:var(--ifm-heading-color);font-family:var(--ifm-heading-font-family);font-weight:var(--ifm-heading-font-weight);line-height:var(--ifm-heading-line-height);margin:var(--ifm-heading-margin-top) 0 var(--ifm-heading-margin-bottom) 0}.text--primary,.wordWrapButtonEnabled_EoeP .wordWrapButtonIcon_Bwma,h1{color:var(--ifm-color-primary)}h1{font-size:var(--ifm-h1-font-size)}h2{font-size:var(--ifm-h2-font-size)}h3{font-size:var(--ifm-h3-font-size)}h4{font-size:var(--ifm-h4-font-size)}h5{font-size:var(--ifm-h5-font-size)}h6{font-size:var(--ifm-h6-font-size)}img{max-width:100%}img[align=right]{padding-left:var(--image-alignment-padding)}img[align=left]{padding-right:var(--image-alignment-padding)}.markdown{--ifm-h1-vertical-rhythm-top:3;--ifm-h2-vertical-rhythm-top:2;--ifm-h3-vertical-rhythm-top:1.5;--ifm-heading-vertical-rhythm-top:1.25;--ifm-h1-vertical-rhythm-bottom:1.25;--ifm-heading-vertical-rhythm-bottom:1}.markdown:after,.markdown:before{content:"";display:table}.markdown:after{clear:both}.markdown h1:first-child{--ifm-h1-font-size:3rem;margin-bottom:calc(var(--ifm-h1-vertical-rhythm-bottom)*var(--ifm-leading))}.markdown>h2{--ifm-h2-font-size:2rem;margin-top:calc(var(--ifm-h2-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h3{--ifm-h3-font-size:1.5rem;margin-top:calc(var(--ifm-h3-vertical-rhythm-top)*var(--ifm-leading))}.markdown>h4,.markdown>h5,.markdown>h6{margin-top:calc(var(--ifm-heading-vertical-rhythm-top)*var(--ifm-leading))}.markdown>p,.markdown>pre,.markdown>ul{margin-bottom:var(--ifm-leading)}.markdown li>p{margin-top:var(--ifm-list-paragraph-margin)}.markdown li+li{margin-top:var(--ifm-list-item-margin)}ol,ul{margin:0 0 var(--ifm-list-margin);padding-left:var(--ifm-list-left-padding)}ol ol,ul ol{list-style-type:lower-roman}ol ol ol,ol ul ol,ul ol ol,ul ul ol{list-style-type:lower-alpha}table{border-collapse:collapse;display:block;margin-bottom:var(--ifm-spacing-vertical)}table thead tr{border-bottom:2px solid var(--ifm-table-border-color)}table thead,table tr:nth-child(2n){background-color:var(--ifm-table-stripe-background)}table tr{background-color:var(--ifm-table-background);border-top:var(--ifm-table-border-width) solid var(--ifm-table-border-color)}table td,table th{border:var(--ifm-table-border-width) solid 
var(--ifm-table-border-color);padding:var(--ifm-table-cell-padding)}table th{background-color:var(--ifm-table-head-background);color:var(--ifm-table-head-color);font-weight:var(--ifm-table-head-font-weight)}table td{color:var(--ifm-table-cell-color)}strong{font-weight:var(--ifm-font-weight-bold)}a{color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}a:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button:hover,.text--no-decoration,.text--no-decoration:hover,a:not([href]){text-decoration:none}p{margin:0 0 var(--ifm-paragraph-margin-bottom)}blockquote{border-left:var(--ifm-blockquote-border-left-width) solid var(--ifm-blockquote-border-color);box-shadow:var(--ifm-blockquote-shadow);color:var(--ifm-blockquote-color);font-size:var(--ifm-blockquote-font-size);padding:var(--ifm-blockquote-padding-vertical) var(--ifm-blockquote-padding-horizontal)}blockquote>:first-child{margin-top:0}blockquote>:last-child{margin-bottom:0}hr{background-color:var(--ifm-hr-background-color);border:0;height:var(--ifm-hr-height);margin:var(--ifm-hr-margin-vertical) 0}.shadow--lw{box-shadow:var(--ifm-global-shadow-lw)!important}.shadow--md{box-shadow:var(--ifm-global-shadow-md)!important}.shadow--tl{box-shadow:var(--ifm-global-shadow-tl)!important}.text--secondary{color:var(--ifm-color-secondary)}.text--success{color:var(--ifm-color-success)}.text--info{color:var(--ifm-color-info)}.text--warning{color:var(--ifm-color-warning)}.text--danger{color:var(--ifm-color-danger)}.text--center{text-align:center}.text--left{text-align:left}.text--justify{text-align:justify}.text--right{text-align:right}.text--capitalize{text-transform:capitalize}.text--lowercase{text-transform:lowercase}.admonitionHeading_tbUL,.alert__heading,.text--uppercase{text-transform:uppercase}.text--light{font-weight:var(--ifm-font-weight-light)}.text--normal{font-weight:var(--ifm-font-weight-normal)}.text--semibold{font-weight:var(--ifm-font-weight-semibold)}.text--bold{font-weight:var(--ifm-font-weight-bold)}.text--italic{font-style:italic}.text--truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text--break{word-wrap:break-word!important;word-break:break-word!important}.clean-btn{background:none;border:none;color:inherit;cursor:pointer;font-family:inherit;padding:0}.alert,.alert 
.close{color:var(--ifm-alert-foreground-color)}.clean-list{padding-left:0}.alert--primary{--ifm-alert-background-color:var(--ifm-color-primary-contrast-background);--ifm-alert-background-color-highlight:#3578e526;--ifm-alert-foreground-color:var(--ifm-color-primary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-primary-dark)}.alert--secondary{--ifm-alert-background-color:var(--ifm-color-secondary-contrast-background);--ifm-alert-background-color-highlight:#ebedf026;--ifm-alert-foreground-color:var(--ifm-color-secondary-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-secondary-dark)}.alert--success{--ifm-alert-background-color:var(--ifm-color-success-contrast-background);--ifm-alert-background-color-highlight:#00a40026;--ifm-alert-foreground-color:var(--ifm-color-success-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-success-dark)}.alert--info{--ifm-alert-background-color:var(--ifm-color-info-contrast-background);--ifm-alert-background-color-highlight:#54c7ec26;--ifm-alert-foreground-color:var(--ifm-color-info-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-info-dark)}.alert--warning{--ifm-alert-background-color:var(--ifm-color-warning-contrast-background);--ifm-alert-background-color-highlight:#ffba0026;--ifm-alert-foreground-color:var(--ifm-color-warning-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-warning-dark)}.alert--danger{--ifm-alert-background-color:var(--ifm-color-danger-contrast-background);--ifm-alert-background-color-highlight:#fa383e26;--ifm-alert-foreground-color:var(--ifm-color-danger-contrast-foreground);--ifm-alert-border-color:var(--ifm-color-danger-dark)}.alert{--ifm-code-background:var(--ifm-alert-background-color-highlight);--ifm-link-color:var(--ifm-alert-foreground-color);--ifm-link-hover-color:var(--ifm-alert-foreground-color);--ifm-link-decoration:underline;--ifm-tabs-color:var(--ifm-alert-foreground-color);--ifm-tabs-color-active:var(--ifm-alert-foreground-color);--ifm-tabs-color-active-border:var(--ifm-alert-border-color);background-color:var(--ifm-alert-background-color);border:var(--ifm-alert-border-width) solid var(--ifm-alert-border-color);border-left-width:var(--ifm-alert-border-left-width);border-radius:var(--ifm-alert-border-radius);box-shadow:var(--ifm-alert-shadow);padding:var(--ifm-alert-padding-vertical) var(--ifm-alert-padding-horizontal)}.alert__heading{align-items:center;display:flex;font:700 var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.5rem}.alert__icon{display:inline-flex;margin-right:.4em}.alert__icon svg{fill:var(--ifm-alert-foreground-color);stroke:var(--ifm-alert-foreground-color);stroke-width:0}.alert .close{margin:calc(var(--ifm-alert-padding-vertical)*-1) calc(var(--ifm-alert-padding-horizontal)*-1) 0 0;opacity:.75}.alert .close:focus,.alert .close:hover{opacity:1}.alert a{text-decoration-color:var(--ifm-alert-border-color)}.alert a:hover{text-decoration-thickness:2px}.avatar{column-gap:var(--ifm-avatar-intro-margin);display:flex}.avatar__photo{border-radius:50%;display:block;height:var(--ifm-avatar-photo-size);overflow:hidden;width:var(--ifm-avatar-photo-size)}.card--full-height,.navbar__logo img,body,html{height:100%}.avatar__photo--sm{--ifm-avatar-photo-size:2rem}.avatar__photo--lg{--ifm-avatar-photo-size:4rem}.avatar__photo--xl{--ifm-avatar-photo-size:6rem}.avatar__intro{display:flex;flex:1 
1;flex-direction:column;justify-content:center;text-align:var(--ifm-avatar-intro-alignment)}.badge,.breadcrumbs__item,.breadcrumbs__link,.button,.dropdown>.navbar__link:after,.searchBarContainer_NW3z.searchIndexLoading_EJ1f .searchBarLoadingRing_YnHq{display:inline-block}.avatar__name{font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base)}.avatar__subtitle{margin-top:.25rem}.avatar--vertical{--ifm-avatar-intro-alignment:center;--ifm-avatar-intro-margin:0.5rem;align-items:center;flex-direction:column}.badge{background-color:var(--ifm-badge-background-color);border:var(--ifm-badge-border-width) solid var(--ifm-badge-border-color);border-radius:var(--ifm-badge-border-radius);color:var(--ifm-badge-color);font-size:75%;font-weight:var(--ifm-font-weight-bold);line-height:1;padding:var(--ifm-badge-padding-vertical) var(--ifm-badge-padding-horizontal)}.badge--primary{--ifm-badge-background-color:var(--ifm-color-primary)}.badge--secondary{--ifm-badge-background-color:var(--ifm-color-secondary);color:var(--ifm-color-black)}.breadcrumbs__link,.button.button--secondary.button--outline:not(.button--active):not(:hover){color:var(--ifm-font-color-base)}.badge--success{--ifm-badge-background-color:var(--ifm-color-success)}.badge--info{--ifm-badge-background-color:var(--ifm-color-info)}.badge--warning{--ifm-badge-background-color:var(--ifm-color-warning)}.badge--danger{--ifm-badge-background-color:var(--ifm-color-danger)}.breadcrumbs{margin-bottom:0;padding-left:0}.breadcrumbs__item:not(:last-child):after{background:var(--ifm-breadcrumb-separator) center;content:" ";display:inline-block;filter:var(--ifm-breadcrumb-separator-filter);height:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier));margin:0 var(--ifm-breadcrumb-spacing);opacity:.5;width:calc(var(--ifm-breadcrumb-separator-size)*var(--ifm-breadcrumb-size-multiplier)*var(--ifm-breadcrumb-separator-size-multiplier))}.breadcrumbs__item--active .breadcrumbs__link{background:var(--ifm-breadcrumb-item-background-active);color:var(--ifm-breadcrumb-color-active)}.breadcrumbs__link{border-radius:var(--ifm-breadcrumb-border-radius);font-size:calc(1rem*var(--ifm-breadcrumb-size-multiplier));padding:calc(var(--ifm-breadcrumb-padding-vertical)*var(--ifm-breadcrumb-size-multiplier)) calc(var(--ifm-breadcrumb-padding-horizontal)*var(--ifm-breadcrumb-size-multiplier));transition-duration:var(--ifm-transition-fast);transition-property:background,color}.breadcrumbs__link:any-link:hover,.breadcrumbs__link:link:hover,.breadcrumbs__link:visited:hover,area[href].breadcrumbs__link:hover{background:var(--ifm-breadcrumb-item-background-active);text-decoration:none}.breadcrumbs--sm{--ifm-breadcrumb-size-multiplier:0.8}.breadcrumbs--lg{--ifm-breadcrumb-size-multiplier:1.2}.button{background-color:var(--ifm-button-background-color);border:var(--ifm-button-border-width) solid var(--ifm-button-border-color);border-radius:var(--ifm-button-border-radius);cursor:pointer;font-size:calc(.875rem*var(--ifm-button-size-multiplier));font-weight:var(--ifm-button-font-weight);line-height:1.5;padding:calc(var(--ifm-button-padding-vertical)*var(--ifm-button-size-multiplier)) 
calc(var(--ifm-button-padding-horizontal)*var(--ifm-button-size-multiplier));text-align:center;transition-duration:var(--ifm-button-transition-duration);transition-property:color,background,border-color;-webkit-user-select:none;user-select:none;white-space:nowrap}.button,.button:hover{color:var(--ifm-button-color)}.button--outline{--ifm-button-color:var(--ifm-button-border-color)}.button--outline:hover{--ifm-button-background-color:var(--ifm-button-border-color)}.button--link{--ifm-button-border-color:#0000;color:var(--ifm-link-color);text-decoration:var(--ifm-link-decoration)}.button--link.button--active,.button--link:active,.button--link:hover{color:var(--ifm-link-hover-color);text-decoration:var(--ifm-link-hover-decoration)}.button.disabled,.button:disabled,.button[disabled]{opacity:.65;pointer-events:none}.button--sm{--ifm-button-size-multiplier:0.8}.button--lg{--ifm-button-size-multiplier:1.35}.button--block{display:block;width:100%}.button.button--secondary{color:var(--ifm-color-gray-900)}:where(.button--primary){--ifm-button-background-color:var(--ifm-color-primary);--ifm-button-border-color:var(--ifm-color-primary)}:where(.button--primary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-primary-dark);--ifm-button-border-color:var(--ifm-color-primary-dark)}.button--primary.button--active,.button--primary:active{--ifm-button-background-color:var(--ifm-color-primary-darker);--ifm-button-border-color:var(--ifm-color-primary-darker)}:where(.button--secondary){--ifm-button-background-color:var(--ifm-color-secondary);--ifm-button-border-color:var(--ifm-color-secondary)}:where(.button--secondary):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-secondary-dark);--ifm-button-border-color:var(--ifm-color-secondary-dark)}.button--secondary.button--active,.button--secondary:active{--ifm-button-background-color:var(--ifm-color-secondary-darker);--ifm-button-border-color:var(--ifm-color-secondary-darker)}:where(.button--success){--ifm-button-background-color:var(--ifm-color-success);--ifm-button-border-color:var(--ifm-color-success)}:where(.button--success):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-success-dark);--ifm-button-border-color:var(--ifm-color-success-dark)}.button--success.button--active,.button--success:active{--ifm-button-background-color:var(--ifm-color-success-darker);--ifm-button-border-color:var(--ifm-color-success-darker)}:where(.button--info){--ifm-button-background-color:var(--ifm-color-info);--ifm-button-border-color:var(--ifm-color-info)}:where(.button--info):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-info-dark);--ifm-button-border-color:var(--ifm-color-info-dark)}.button--info.button--active,.button--info:active{--ifm-button-background-color:var(--ifm-color-info-darker);--ifm-button-border-color:var(--ifm-color-info-darker)}:where(.button--warning){--ifm-button-background-color:var(--ifm-color-warning);--ifm-button-border-color:var(--ifm-color-warning)}:where(.button--warning):not(.button--outline):hover{--ifm-button-background-color:var(--ifm-color-warning-dark);--ifm-button-border-color:var(--ifm-color-warning-dark)}.button--warning.button--active,.button--warning:active{--ifm-button-background-color:var(--ifm-color-warning-darker);--ifm-button-border-color:var(--ifm-color-warning-darker)}:where(.button--danger){--ifm-button-background-color:var(--ifm-color-danger);--ifm-button-border-color:var(--ifm-color-danger)}:where(.button--danger):not(.button--outline):h
over{--ifm-button-background-color:var(--ifm-color-danger-dark);--ifm-button-border-color:var(--ifm-color-danger-dark)}.button--danger.button--active,.button--danger:active{--ifm-button-background-color:var(--ifm-color-danger-darker);--ifm-button-border-color:var(--ifm-color-danger-darker)}.button-group{display:inline-flex;gap:var(--ifm-button-group-spacing)}.button-group>.button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.button-group>.button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0}.button-group--block{display:flex;justify-content:stretch}.button-group--block>.button{flex-grow:1}.card{background-color:var(--ifm-card-background-color);border-radius:var(--ifm-card-border-radius);box-shadow:var(--ifm-global-shadow-lw);display:flex;flex-direction:column;overflow:hidden}.card__image{padding-top:var(--ifm-card-vertical-spacing)}.card__image:first-child{padding-top:0}.card__body,.card__footer,.card__header{padding:var(--ifm-card-vertical-spacing) var(--ifm-card-horizontal-spacing)}.card__body:not(:last-child),.card__footer:not(:last-child),.card__header:not(:last-child){padding-bottom:0}.card__body>:last-child,.card__footer>:last-child,.card__header>:last-child{margin-bottom:0}.card__footer{margin-top:auto}.table-of-contents{font-size:.8rem;margin-bottom:0;padding:var(--ifm-toc-padding-vertical) 0}.table-of-contents,.table-of-contents ul{list-style:none;padding-left:var(--ifm-toc-padding-horizontal)}.table-of-contents li{margin:var(--ifm-toc-padding-vertical) var(--ifm-toc-padding-horizontal)}.table-of-contents__left-border{border-left:1px solid var(--ifm-toc-border-color)}.table-of-contents__link{color:var(--ifm-toc-link-color);display:block}.table-of-contents__link--active,.table-of-contents__link--active code,.table-of-contents__link:hover,.table-of-contents__link:hover code{color:var(--ifm-color-primary);text-decoration:none}.content_knG7 a,.hitFooter_E9YW a,.suggestion_fB_2.cursor_eG29 mark{text-decoration:underline}.close{color:var(--ifm-color-black);float:right;font-size:1.5rem;font-weight:var(--ifm-font-weight-bold);line-height:1;opacity:.5;padding:1rem;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.close:hover{opacity:.7}.close:focus,.theme-code-block-highlighted-line .codeLineNumber_Tfdd:before{opacity:.8}.dropdown{display:inline-flex;font-weight:var(--ifm-dropdown-font-weight);position:relative;vertical-align:top}.dropdown--hoverable:hover .dropdown__menu,.dropdown--show .dropdown__menu{opacity:1;pointer-events:all;transform:translateY(-1px);visibility:visible}.dropdown--right .dropdown__menu{left:inherit;right:0}.dropdown--nocaret .navbar__link:after{content:none!important}.dropdown__menu{background-color:var(--ifm-dropdown-background-color);border-radius:var(--ifm-global-radius);box-shadow:var(--ifm-global-shadow-md);left:0;max-height:80vh;min-width:10rem;opacity:0;overflow-y:auto;padding:.5rem;pointer-events:none;position:absolute;top:calc(100% - var(--ifm-navbar-item-padding-vertical) + .3rem);transform:translateY(-.625rem);transition-duration:var(--ifm-transition-fast);transition-property:opacity,transform,visibility;transition-timing-function:var(--ifm-transition-timing-default);visibility:hidden;z-index:var(--ifm-z-index-dropdown)}.sidebar_re4s,.tableOfContents_bqdL{max-height:calc(100vh - var(--ifm-navbar-height) - 2rem)}.menu__caret,.menu__link,.menu__list-item-collapsible{border-radius:.25rem;transition:background var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.dropdown__link{border-radius:.25rem;color:var(--ifm-dropdown-link-color);display:block;font-size:.875rem;margin-top:.2rem;padding:.25rem .5rem;white-space:nowrap}.dropdown__link--active,.dropdown__link:hover{background-color:var(--ifm-dropdown-hover-background-color);color:var(--ifm-dropdown-link-color);text-decoration:none}.dropdown__link--active,.dropdown__link--active:hover{--ifm-dropdown-link-color:var(--ifm-link-color)}.dropdown>.navbar__link:after{border-color:currentcolor #0000;border-style:solid;border-width:.4em .4em 0;content:"";margin-left:.3em;position:relative;top:2px;transform:translateY(-50%)}.footer{background-color:var(--ifm-footer-background-color);color:var(--ifm-footer-color);padding:var(--ifm-footer-padding-vertical) var(--ifm-footer-padding-horizontal)}.footer--dark{--ifm-footer-background-color:#303846;--ifm-footer-color:var(--ifm-footer-link-color);--ifm-footer-link-color:var(--ifm-color-secondary);--ifm-footer-title-color:var(--ifm-color-white)}.footer__links{margin-bottom:1rem}.footer__link-item{color:var(--ifm-footer-link-color);line-height:2}.footer__link-item:hover{color:var(--ifm-footer-link-hover-color)}.footer__link-separator{margin:0 var(--ifm-footer-link-horizontal-spacing)}.footer__logo{margin-top:1rem;max-width:var(--ifm-footer-logo-max-width)}.footer__title{color:var(--ifm-footer-title-color);font:700 var(--ifm-h4-font-size)/var(--ifm-heading-line-height) var(--ifm-font-family-base);margin-bottom:var(--ifm-heading-margin-bottom)}.menu,.navbar__link{font-weight:var(--ifm-font-weight-semibold)}.docItemContainer_Djhp article>:first-child,.docItemContainer_Djhp header+*,.footer__item{margin-top:0}.admonitionContent_S0QG>:last-child,.collapsibleContent_i85q>:last-child,.footer__items,.searchResultItem_U687>h2{margin-bottom:0}.codeBlockStandalone_MEMb,[type=checkbox]{padding:0}.hero{align-items:center;background-color:var(--ifm-hero-background-color);color:var(--ifm-hero-text-color);display:flex;padding:4rem 2rem}.hero--primary{--ifm-hero-background-color:var(--ifm-color-primary);--ifm-hero-text-color:var(--ifm-font-color-base-inverse)}.hero--dark{--ifm-hero-background-color:#303846;--ifm-hero-text-color:var(--ifm-color-white)}.hero__title,.title_f1Hy{font-size:3rem}.hero__subtitle{font-size:1.5rem}.menu__list{margin:0;padding-left:0}.menu__caret,.menu__link{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu__list .menu__list{flex:0 0 100%;margin-top:.25rem;padding-left:var(--ifm-menu-link-padding-horizontal)}.menu__list-item:not(:first-child){margin-top:.25rem}.menu__list-item--collapsed .menu__list{height:0;overflow:hidden}.details_lb9f[data-collapsed=false].isBrowser_bmU9>summary:before,.details_lb9f[open]:not(.isBrowser_bmU9)>summary:before,.menu__list-item--collapsed .menu__caret:before,.menu__list-item--collapsed .menu__link--sublist:after{transform:rotate(90deg)}.menu__list-item-collapsible{display:flex;flex-wrap:wrap;position:relative}.menu__caret:hover,.menu__link:hover,.menu__list-item-collapsible--active,.menu__list-item-collapsible:hover{background:var(--ifm-menu-color-background-hover)}.menu__list-item-collapsible .menu__link--active,.menu__list-item-collapsible 
.menu__link:hover{background:none!important}.menu__caret,.menu__link{align-items:center;display:flex}.menu__link{color:var(--ifm-menu-color);flex:1;line-height:1.25}.menu__link:hover{color:var(--ifm-menu-color);text-decoration:none}.menu__caret:before,.menu__link--sublist-caret:after{content:"";height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast) linear;width:1.25rem;filter:var(--ifm-menu-link-sublist-icon-filter)}.menu__link--sublist-caret:after{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem;margin-left:auto;min-width:1.25rem}.menu__link--active,.menu__link--active:hover{color:var(--ifm-menu-color-active)}.navbar__brand,.navbar__link{color:var(--ifm-navbar-link-color)}.menu__link--active:not(.menu__link--sublist){background-color:var(--ifm-menu-color-background-active)}.menu__caret:before{background:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem}.navbar--dark,html[data-theme=dark]{--ifm-menu-link-sublist-icon-filter:invert(100%) sepia(94%) saturate(17%) hue-rotate(223deg) brightness(104%) contrast(98%)}.navbar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-navbar-shadow);height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) var(--ifm-navbar-padding-horizontal)}.navbar,.navbar>.container,.navbar>.container-fluid{display:flex}.navbar--fixed-top{position:sticky;top:0;z-index:var(--ifm-z-index-fixed)}.navbar-sidebar,.navbar-sidebar__backdrop{bottom:0;opacity:0;position:fixed;transition-duration:var(--ifm-transition-fast);transition-timing-function:ease-in-out;left:0;top:0;visibility:hidden}.navbar__inner{display:flex;flex-wrap:wrap;justify-content:space-between;width:100%}.navbar__brand{align-items:center;display:flex;margin-right:1rem;min-width:0}.navbar__brand:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.announcementBarContent_xLdY,.navbar__title{flex:1 1 auto}.navbar__toggle{display:none;margin-right:.5rem}.navbar__logo{flex:0 0 auto;height:2rem;margin-right:.5rem}.navbar__items{align-items:center;display:flex;flex:1;min-width:0}.navbar__items--center{flex:0 0 auto}.navbar__items--center .navbar__brand{margin:0}.navbar__items--center+.navbar__items--right{flex:1}.navbar__items--right{flex:0 0 auto;justify-content:flex-end}.navbar__items--right>:last-child{padding-right:0}.navbar__item{display:inline-block;padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}#nprogress,.navbar__item.dropdown .navbar__link:not([href]){pointer-events:none}.navbar__link--active,.navbar__link:hover{color:var(--ifm-navbar-link-hover-color);text-decoration:none}.navbar--dark,.navbar--primary{--ifm-menu-color:var(--ifm-color-gray-300);--ifm-navbar-link-color:var(--ifm-color-gray-100);--ifm-navbar-search-input-background-color:#ffffff1a;--ifm-navbar-search-input-placeholder-color:#ffffff80;color:var(--ifm-color-white)}.navbar--dark{--ifm-navbar-background-color:#242526;--ifm-menu-color-background-active:#ffffff0d;--ifm-navbar-search-input-color:var(--ifm-color-white)}.navbar--primary{--ifm-navbar-background-color:var(--ifm-color-primary);--ifm-navbar-link-hover-color:var(--ifm-color-white);--ifm-menu-color-active:var(--ifm-color-white);--ifm-navbar-search-input-color:var(--ifm-color-emphasis-500)}.navbar__search-input{-webkit-appearance:none;appearance:none;background:var(--ifm-navbar-search-input-background-color) var(--ifm-navbar-search-input-icon) no-repeat .75rem center/1rem 
1rem;border:none;border-radius:2rem;color:var(--ifm-navbar-search-input-color);cursor:text;display:inline-block;font-size:.9rem;height:2rem;padding:0 .5rem 0 2.25rem;width:12.5rem}.navbar__search-input::placeholder{color:var(--ifm-navbar-search-input-placeholder-color)}.navbar-sidebar{background-color:var(--ifm-navbar-background-color);box-shadow:var(--ifm-global-shadow-md);transform:translate3d(-100%,0,0);transition-property:opacity,visibility,transform;width:var(--ifm-navbar-sidebar-width)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar__items{transform:translateZ(0)}.navbar-sidebar--show .navbar-sidebar,.navbar-sidebar--show .navbar-sidebar__backdrop{opacity:1;visibility:visible}.navbar-sidebar__backdrop{background-color:#0009;right:0;transition-property:opacity,visibility}.navbar-sidebar__brand{align-items:center;box-shadow:var(--ifm-navbar-shadow);display:flex;flex:1;height:var(--ifm-navbar-height);padding:var(--ifm-navbar-padding-vertical) var(--ifm-navbar-padding-horizontal)}.navbar-sidebar__items{display:flex;height:calc(100% - var(--ifm-navbar-height));transition:transform var(--ifm-transition-fast) ease-in-out}.navbar-sidebar__items--show-secondary{transform:translate3d(calc((var(--ifm-navbar-sidebar-width))*-1),0,0)}.navbar-sidebar__item{flex-shrink:0;padding:.5rem;width:calc(var(--ifm-navbar-sidebar-width))}.navbar-sidebar__back{background:var(--ifm-menu-color-background-active);font-size:15px;font-weight:var(--ifm-button-font-weight);margin:0 0 .2rem -.5rem;padding:.6rem 1.5rem;position:relative;text-align:left;top:-.5rem;width:calc(100% + 1rem)}.navbar-sidebar__close{display:flex;margin-left:auto}.pagination{column-gap:var(--ifm-pagination-page-spacing);display:flex;font-size:var(--ifm-pagination-font-size);padding-left:0}.pagination--sm{--ifm-pagination-font-size:0.8rem;--ifm-pagination-padding-horizontal:0.8rem;--ifm-pagination-padding-vertical:0.2rem}.pagination--lg{--ifm-pagination-font-size:1.2rem;--ifm-pagination-padding-horizontal:1.2rem;--ifm-pagination-padding-vertical:0.3rem}.pagination__item{display:inline-flex}.pagination__item>span{padding:var(--ifm-pagination-padding-vertical)}.pagination__item--active .pagination__link{color:var(--ifm-pagination-color-active)}.pagination__item--active .pagination__link,.pagination__item:not(.pagination__item--active):hover .pagination__link{background:var(--ifm-pagination-item-active-background)}.pagination__item--disabled,.pagination__item[disabled]{opacity:.25;pointer-events:none}.pagination__link{border-radius:var(--ifm-pagination-border-radius);color:var(--ifm-font-color-base);display:inline-block;padding:var(--ifm-pagination-padding-vertical) var(--ifm-pagination-padding-horizontal);transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.pagination__link:hover,.sidebarItemLink_mo7H:hover{text-decoration:none}.pagination-nav{grid-gap:var(--ifm-spacing-horizontal);display:grid;gap:var(--ifm-spacing-horizontal);grid-template-columns:repeat(2,1fr)}.pagination-nav__link{border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-pagination-nav-border-radius);display:block;height:100%;line-height:var(--ifm-heading-line-height);padding:var(--ifm-global-spacing);transition:border-color var(--ifm-transition-fast) 
var(--ifm-transition-timing-default)}.pagination-nav__link:hover{border-color:var(--ifm-pagination-nav-color-hover);text-decoration:none}.pagination-nav__link--next{grid-column:2/3;text-align:right}.pagination-nav__label{font-size:var(--ifm-h4-font-size);font-weight:var(--ifm-heading-font-weight);word-break:break-word}.pagination-nav__link--prev .pagination-nav__label:before{content:"« "}.pagination-nav__link--next .pagination-nav__label:after{content:" »"}.pagination-nav__sublabel{color:var(--ifm-color-content-secondary);font-size:var(--ifm-h5-font-size);font-weight:var(--ifm-font-weight-semibold);margin-bottom:.25rem}.pills__item,.tabs{font-weight:var(--ifm-font-weight-bold)}.pills{display:flex;gap:var(--ifm-pills-spacing);padding-left:0}.pills__item{border-radius:.5rem;cursor:pointer;display:inline-block;padding:.25rem 1rem;transition:background var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs,:not(.containsTaskList_mC6p>li)>.containsTaskList_mC6p{padding-left:0}.pills__item--active{color:var(--ifm-pills-color-active)}.pills__item--active,.pills__item:not(.pills__item--active):hover{background:var(--ifm-pills-color-background-active)}.pills--block{justify-content:stretch}.pills--block .pills__item{flex-grow:1;text-align:center}.tabs{color:var(--ifm-tabs-color);display:flex;margin-bottom:0;overflow-x:auto}.tabs__item{border-bottom:3px solid #0000;border-radius:var(--ifm-global-radius);cursor:pointer;display:inline-flex;padding:var(--ifm-tabs-padding-vertical) var(--ifm-tabs-padding-horizontal);transition:background-color var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.tabs__item--active{border-bottom-color:var(--ifm-tabs-color-active-border);border-bottom-left-radius:0;border-bottom-right-radius:0;color:var(--ifm-tabs-color-active)}.tabs__item:hover{background-color:var(--ifm-hover-overlay)}.tabs--block{justify-content:stretch}.tabs--block .tabs__item{flex-grow:1;justify-content:center}html[data-theme=dark]{--ifm-color-scheme:dark;--ifm-color-emphasis-0:var(--ifm-color-gray-1000);--ifm-color-emphasis-100:var(--ifm-color-gray-900);--ifm-color-emphasis-200:var(--ifm-color-gray-800);--ifm-color-emphasis-300:var(--ifm-color-gray-700);--ifm-color-emphasis-400:var(--ifm-color-gray-600);--ifm-color-emphasis-600:var(--ifm-color-gray-400);--ifm-color-emphasis-700:var(--ifm-color-gray-300);--ifm-color-emphasis-800:var(--ifm-color-gray-200);--ifm-color-emphasis-900:var(--ifm-color-gray-100);--ifm-color-emphasis-1000:var(--ifm-color-gray-0);--ifm-background-color:#1b1b1d;--ifm-background-surface-color:#242526;--ifm-hover-overlay:#ffffff0d;--ifm-color-content:#e3e3e3;--ifm-color-content-secondary:#fff;--ifm-breadcrumb-separator-filter:invert(64%) sepia(11%) saturate(0%) hue-rotate(149deg) brightness(99%) 
contrast(95%);--ifm-code-background:#ffffff1a;--ifm-scrollbar-track-background-color:#444;--ifm-scrollbar-thumb-background-color:#686868;--ifm-scrollbar-thumb-hover-background-color:#7a7a7a;--ifm-table-stripe-background:#ffffff12;--ifm-toc-border-color:var(--ifm-color-emphasis-200);--ifm-color-primary-contrast-background:#102445;--ifm-color-primary-contrast-foreground:#ebf2fc;--ifm-color-secondary-contrast-background:#474748;--ifm-color-secondary-contrast-foreground:#fdfdfe;--ifm-color-success-contrast-background:#003100;--ifm-color-success-contrast-foreground:#e6f6e6;--ifm-color-info-contrast-background:#193c47;--ifm-color-info-contrast-foreground:#eef9fd;--ifm-color-warning-contrast-background:#4d3800;--ifm-color-warning-contrast-foreground:#fff8e6;--ifm-color-danger-contrast-background:#4b1113;--ifm-color-danger-contrast-foreground:#ffebec}#nprogress .bar{background:var(--docusaurus-progress-bar-color);height:2px;left:0;position:fixed;top:0;width:100%;z-index:1031}#nprogress .peg{box-shadow:0 0 10px var(--docusaurus-progress-bar-color),0 0 5px var(--docusaurus-progress-bar-color);height:100%;opacity:1;position:absolute;right:0;transform:rotate(3deg) translateY(-4px);width:100px}:root,[data-theme=dark]{--ifm-color-primary:#2196f3;--ifm-color-primary-dark:#1565c0;--ifm-color-primary-darker:#0d47a1;--ifm-color-primary-darkest:#002171;--ifm-color-primary-light:#64b5f6;--ifm-color-primary-lighter:#bbdefb;--ifm-color-primary-lightest:#e3f2fd;--ifm-code-font-size:95%;--docusaurus-highlighted-code-line-bg:#0000001a}div[class^=announcementBar_]{background:repeating-linear-gradient(-35deg,var(--ifm-color-primary-lighter),var(--ifm-color-primary-lighter) 20px,var(--ifm-color-primary-lightest) 10px,var(--ifm-color-primary-lightest) 40px);font-weight:700}h2,h3,h4,h5,h6{color:var(--ifm-color-primary-light)}@font-face{font-family:SourceHanSansCN;font-style:normal;font-weight:400;src:url(/assets/fonts/SourceHanSansCN-Regular-1235a610813e82ec7e42bbb8123b3d74.ttf) format("truetype")}body,h1,h2,h3,h4,h5,h6,p{font-family:Helvetica,SourceHanSansCN}body:not(.navigation-with-keyboard) :not(input):focus{outline:0}#__docusaurus-base-url-issue-banner-container,.docSidebarContainer_b6E3,.hideAction_vcyE>svg,.sidebarLogo_isFc,.themedImage_ToTc,[data-theme=dark] .lightToggleIcon_pyhR,[data-theme=light] .darkToggleIcon_wfgR,html[data-announcement-bar-initially-dismissed=true] .announcementBar_mb4j{display:none}.skipToContent_fXgn{background-color:var(--ifm-background-surface-color);color:var(--ifm-color-emphasis-900);left:100%;padding:calc(var(--ifm-global-spacing)/2) var(--ifm-global-spacing);position:fixed;top:1rem;z-index:calc(var(--ifm-z-index-fixed) + 1)}.skipToContent_fXgn:focus{box-shadow:var(--ifm-global-shadow-md);left:1rem}.closeButton_CVFx{line-height:0;padding:0}.content_knG7{font-size:85%;padding:5px 0;text-align:center}.content_knG7 a{color:inherit}.announcementBar_mb4j{align-items:center;background-color:var(--ifm-color-white);border-bottom:1px solid var(--ifm-color-emphasis-100);color:var(--ifm-color-black);display:flex;height:var(--docusaurus-announcement-bar-height)}.announcementBarPlaceholder_vyr4{flex:0 0 10px}.announcementBarClose_gvF7{align-self:stretch;flex:0 0 30px}.toggle_vylO{height:2rem;width:2rem}.toggleButton_gllP{align-items:center;border-radius:50%;display:flex;height:100%;justify-content:center;transition:background 
var(--ifm-transition-fast);width:100%}.toggleButton_gllP:hover{background:var(--ifm-color-emphasis-200)}.toggleButtonDisabled_aARS{cursor:not-allowed}.darkNavbarColorModeToggle_X3D1:hover{background:var(--ifm-color-gray-800)}[data-theme=dark] .themedImage--dark_i4oU,[data-theme=light] .themedImage--light_HNdA,html:not([data-theme]) .themedComponent--light_NU7w{display:initial}.iconExternalLink_nPIU{margin-left:.3rem}.iconLanguage_nlXk{margin-right:5px;vertical-align:text-bottom}.searchBar_RVTs .dropdownMenu_qbY6{background:var(--search-local-modal-background,#f5f6f7);border-radius:6px;box-shadow:var(--search-local-modal-shadow,inset 1px 1px 0 0 #ffffff80,0 3px 8px 0 #555a64);left:auto!important;margin-top:8px;padding:var(--search-local-spacing,12px);position:relative;right:0!important;width:var(--search-local-modal-width,560px)}html[data-theme=dark] .searchBar_RVTs .dropdownMenu_qbY6{background:var(--search-local-modal-background,var(--ifm-background-color));box-shadow:var(--search-local-modal-shadow,inset 1px 1px 0 0 #2c2e40,0 3px 8px 0 #000309)}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2{align-items:center;background:var(--search-local-hit-background,#fff);border-radius:4px;box-shadow:var(--search-local-hit-shadow,0 1px 3px 0 #d4d9e1);color:var(--search-local-hit-color,#444950);cursor:pointer;display:flex;flex-direction:row;height:var(--search-local-hit-height,56px);padding:0 var(--search-local-spacing,12px);width:100%}.hitTree_kk6K,.noResults_l6Q3{align-items:center;display:flex}html[data-theme=dark] .dropdownMenu_qbY6 .suggestion_fB_2{background:var(--search-local-hit-background,var(--ifm-color-emphasis-100));box-shadow:var(--search-local-hit-shadow,none);color:var(--search-local-hit-color,var(--ifm-font-color-base))}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2:not(:last-child){margin-bottom:4px}.searchBar_RVTs .dropdownMenu_qbY6 .suggestion_fB_2.cursor_eG29{background-color:var(--search-local-highlight-color,var(--ifm-color-primary))}.hitFooter_E9YW a,.hitIcon_a7Zy,.hitPath_ieM4,.hitTree_kk6K,.noResultsIcon_EBY5{color:var(--search-local-muted-color,#969faf)}html[data-theme=dark] .hitIcon_a7Zy,html[data-theme=dark] .hitPath_ieM4,html[data-theme=dark] .hitTree_kk6K,html[data-theme=dark] .noResultsIcon_EBY5{color:var(--search-local-muted-color,var(--ifm-color-secondary-darkest))}.hitTree_kk6K>svg{height:var(--search-local-hit-height,56px);opacity:.5;width:24px}.hitIcon_a7Zy,.hitTree_kk6K>svg{stroke-width:var(--search-local-icon-stroke-width,1.4)}.hitAction_NqkB,.hitIcon_a7Zy{height:20px;width:20px}.hitWrapper_sAK8{display:flex;flex:1 1 auto;flex-direction:column;font-weight:500;justify-content:center;margin:0 8px;overflow-x:hidden;width:80%}.hitWrapper_sAK8 mark{background:none;color:var(--search-local-highlight-color,var(--ifm-color-primary))}.hitTitle_vyVt{font-size:.9em}.hitPath_ieM4{font-size:.75em}.hitPath_ieM4,.hitTitle_vyVt{overflow-x:hidden;text-overflow:ellipsis;white-space:nowrap}.noResults_l6Q3{flex-direction:column;justify-content:center;padding:var(--search-local-spacing,12px) 0}.noResultsIcon_EBY5{margin-bottom:var(--search-local-spacing,12px)}.hitFooter_E9YW{font-size:.85em;margin-top:var(--search-local-spacing,12px);text-align:center}.cursor_eG29 .hideAction_vcyE>svg,.tocCollapsibleContent_vkbj a{display:block}.suggestion_fB_2.cursor_eG29,.suggestion_fB_2.cursor_eG29 .hitIcon_a7Zy,.suggestion_fB_2.cursor_eG29 .hitPath_ieM4,.suggestion_fB_2.cursor_eG29 .hitTree_kk6K,.suggestion_fB_2.cursor_eG29 
mark{color:var(--search-local-hit-active-color,var(--ifm-color-white))!important}.searchBarContainer_NW3z{margin-left:16px}.searchBarContainer_NW3z .searchBarLoadingRing_YnHq{display:none;left:10px;position:absolute;top:6px}.searchBarContainer_NW3z .searchClearButton_qk4g{background:none;border:none;line-height:1rem;padding:0;position:absolute;right:.8rem;top:50%;transform:translateY(-50%)}.navbar__search{position:relative}.searchIndexLoading_EJ1f .navbar__search-input{background-image:none}.searchHintContainer_Pkmr{align-items:center;display:flex;gap:4px;height:100%;justify-content:center;pointer-events:none;position:absolute;right:10px;top:0}.searchHint_iIMx{background-color:var(--ifm-navbar-search-input-background-color);border:1px solid var(--ifm-color-emphasis-500);box-shadow:inset 0 -1px 0 var(--ifm-color-emphasis-500);color:var(--ifm-navbar-search-input-placeholder-color)}.loadingRing_RJI3{display:inline-block;height:20px;opacity:var(--search-local-loading-icon-opacity,.5);position:relative;width:20px}.loadingRing_RJI3 div{animation:1.2s cubic-bezier(.5,0,.5,1) infinite a;border:2px solid var(--search-load-loading-icon-color,var(--ifm-navbar-search-input-color));border-color:var(--search-load-loading-icon-color,var(--ifm-navbar-search-input-color)) #0000 #0000 #0000;border-radius:50%;display:block;height:16px;margin:2px;position:absolute;width:16px}.loadingRing_RJI3 div:first-child{animation-delay:-.45s}.loadingRing_RJI3 div:nth-child(2){animation-delay:-.3s}.loadingRing_RJI3 div:nth-child(3){animation-delay:-.15s}@keyframes a{0%{transform:rotate(0)}to{transform:rotate(1turn)}}.navbarHideable_m1mJ{transition:transform var(--ifm-transition-fast) ease}.navbarHidden_jGov{transform:translate3d(0,calc(-100% - 2px),0)}.errorBoundaryError_a6uf{color:red;white-space:pre-wrap}.footerLogoLink_BH7S{opacity:.5;transition:opacity var(--ifm-transition-fast) var(--ifm-transition-timing-default)}.footerLogoLink_BH7S:hover,.hash-link:focus,:hover>.hash-link{opacity:1}.mainWrapper_z2l0{display:flex;flex:1 0 auto;flex-direction:column}.docusaurus-mt-lg{margin-top:3rem}#__docusaurus{display:flex;flex-direction:column;min-height:100%}.searchContextInput_mXoe,.searchQueryInput_CFBF{background:var(--ifm-background-color);border:var(--ifm-global-border-width) solid var(--ifm-color-content-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-font-color-base);font-size:var(--ifm-font-size-base);margin-bottom:1rem;padding:.5rem;width:100%}.searchResultItem_U687{border-bottom:1px solid #dfe3e8;padding:1rem 0}.searchResultItemPath_uIbk{color:var(--ifm-color-content-secondary);font-size:.8rem;margin:.5rem 0 0}.searchResultItemSummary_oZHr{font-style:italic;margin:.5rem 0 0}.backToTopButton_sjWU{background-color:var(--ifm-color-emphasis-200);border-radius:50%;bottom:1.3rem;box-shadow:var(--ifm-global-shadow-lw);height:3rem;opacity:0;position:fixed;right:1.3rem;transform:scale(0);transition:all var(--ifm-transition-fast) var(--ifm-transition-timing-default);visibility:hidden;width:3rem;z-index:calc(var(--ifm-z-index-fixed) - 1)}.backToTopButton_sjWU:after{background-color:var(--ifm-color-emphasis-1000);content:" ";display:inline-block;height:100%;-webkit-mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem no-repeat;mask:var(--ifm-menu-link-sublist-icon) 50%/2rem 2rem 
no-repeat;width:100%}.backToTopButtonShow_xfvO{opacity:1;transform:scale(1);visibility:visible}[data-theme=dark]:root{--docusaurus-collapse-button-bg:#ffffff0d;--docusaurus-collapse-button-bg-hover:#ffffff1a}.collapseSidebarButton_PEFL{display:none;margin:0}.docMainContainer_gTbr,.docPage__5DB{display:flex;width:100%}.docPage__5DB{flex:1 0}.docsWrapper_BCFX{display:flex;flex:1 0 auto}.sidebar_re4s{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 2rem)}.sidebarItemTitle_pO2u{font-size:var(--ifm-h3-font-size);font-weight:var(--ifm-font-weight-bold)}.container_mt6G,.sidebarItemList_Yudw{font-size:.9rem}.sidebarItem__DBe{margin-top:.7rem}.sidebarItemLink_mo7H{color:var(--ifm-font-color-base);display:block}.sidebarItemLinkActive_I1ZP{color:var(--ifm-color-primary)!important}.authorCol_Hf19{flex-grow:1!important;max-width:inherit!important}.imageOnlyAuthorRow_pa_O{display:flex;flex-flow:row wrap}.features_t9lD,.buttons_AeoN{align-items:center;display:flex}.imageOnlyAuthorCol_G86a{margin-left:.3rem;margin-right:.3rem}.features_t9lD{padding:2rem 0;width:100%}.heroBanner_qdFl,[data-theme=dark]{overflow:hidden;padding:4rem 0;text-align:center;position:relative}.featureSvg_GfXr{height:200px;width:200px}.heroBanner_qdFl{-webkit-text-fill-color:#120c0c;background-color:#fff}[data-theme=dark]{background-color:#1b1b1d}.buttonGroup__atx button,.codeBlockContainer_Ckt0{background:var(--prism-background-color);color:var(--prism-color)}.buttons_AeoN{justify-content:center}.codeBlockContainer_Ckt0{border-radius:var(--ifm-code-border-radius);box-shadow:var(--ifm-global-shadow-lw);margin-bottom:var(--ifm-leading)}.codeBlockContent_biex{border-radius:inherit;direction:ltr;position:relative}.codeBlockTitle_Ktv7{border-bottom:1px solid var(--ifm-color-emphasis-300);border-top-left-radius:inherit;border-top-right-radius:inherit;font-size:var(--ifm-code-font-size);font-weight:500;padding:.75rem var(--ifm-pre-padding)}.codeBlock_bY9V{--ifm-pre-background:var(--prism-background-color);margin:0;padding:0}.codeBlockTitle_Ktv7+.codeBlockContent_biex .codeBlock_bY9V{border-top-left-radius:0;border-top-right-radius:0}.codeBlockLines_e6Vv{float:left;font:inherit;min-width:100%;padding:var(--ifm-pre-padding)}.codeBlockLinesWithNumbering_o6Pm{display:table;padding:var(--ifm-pre-padding) 0}.buttonGroup__atx{column-gap:.2rem;display:flex;position:absolute;right:calc(var(--ifm-pre-padding)/2);top:calc(var(--ifm-pre-padding)/2)}.buttonGroup__atx button{align-items:center;border:1px solid var(--ifm-color-emphasis-300);border-radius:var(--ifm-global-radius);display:flex;line-height:0;opacity:0;padding:.4rem;transition:opacity var(--ifm-transition-fast) ease-in-out}.buttonGroup__atx button:focus-visible,.buttonGroup__atx button:hover{opacity:1!important}.theme-code-block:hover .buttonGroup__atx button{opacity:.4}.iconEdit_Z9Sw{margin-right:.3em;vertical-align:sub}:where(:root){--docusaurus-highlighted-code-line-bg:#484d5b}:where([data-theme=dark]){--docusaurus-highlighted-code-line-bg:#646464}.theme-code-block-highlighted-line{background-color:var(--docusaurus-highlighted-code-line-bg);display:block;margin:0 calc(var(--ifm-pre-padding)*-1);padding:0 var(--ifm-pre-padding)}.codeLine_lJS_{counter-increment:a;display:table-row}.codeLineNumber_Tfdd{background:var(--ifm-pre-background);display:table-cell;left:0;overflow-wrap:normal;padding:0
var(--ifm-pre-padding);position:sticky;text-align:right;width:1%}.codeLineNumber_Tfdd:before{content:counter(a);opacity:.4}.codeLineContent_feaV{padding-right:var(--ifm-pre-padding)}.tag_zVej{border:1px solid var(--docusaurus-tag-list-border);transition:border var(--ifm-transition-fast)}.tag_zVej:hover{--docusaurus-tag-list-border:var(--ifm-link-color);text-decoration:none}.tagRegular_sFm0{border-radius:var(--ifm-global-radius);font-size:90%;padding:.2rem .5rem .3rem}.tagWithCount_h2kH{align-items:center;border-left:0;display:flex;padding:0 .5rem 0 1rem;position:relative}.tagWithCount_h2kH:after,.tagWithCount_h2kH:before{border:1px solid var(--docusaurus-tag-list-border);content:"";position:absolute;top:50%;transition:inherit}.tagWithCount_h2kH:before{border-bottom:0;border-right:0;height:1.18rem;right:100%;transform:translate(50%,-50%) rotate(-45deg);width:1.18rem}.tagWithCount_h2kH:after{border-radius:50%;height:.5rem;left:0;transform:translateY(-50%);width:.5rem}.tagWithCount_h2kH span{background:var(--ifm-color-secondary);border-radius:var(--ifm-global-radius);color:var(--ifm-color-black);font-size:.7rem;line-height:1.2;margin-left:.3rem;padding:.1rem .4rem}.theme-code-block:hover .copyButtonCopied_obH4{opacity:1!important}.copyButtonIcons_eSgA{height:1.125rem;position:relative;width:1.125rem}.copyButtonIcon_y97N,.copyButtonSuccessIcon_LjdS{fill:currentColor;height:inherit;left:0;opacity:inherit;position:absolute;top:0;transition:all var(--ifm-transition-fast) ease;width:inherit}.copyButtonSuccessIcon_LjdS{color:#00d600;left:50%;opacity:0;top:50%;transform:translate(-50%,-50%) scale(.33)}.copyButtonCopied_obH4 .copyButtonIcon_y97N{opacity:0;transform:scale(.33)}.copyButtonCopied_obH4 .copyButtonSuccessIcon_LjdS{opacity:1;transform:translate(-50%,-50%) scale(1);transition-delay:75ms}.tags_jXut{display:inline}.tag_QGVx{display:inline-block;margin:0 .4rem .5rem 0}.lastUpdated_vwxv{font-size:smaller;font-style:italic;margin-top:.2rem}.tocCollapsibleButton_TO0P{align-items:center;display:flex;font-size:inherit;justify-content:space-between;padding:.4rem .8rem;width:100%}.tocCollapsibleButton_TO0P:after{background:var(--ifm-menu-link-sublist-icon) 50% 50%/2rem 2rem no-repeat;content:"";filter:var(--ifm-menu-link-sublist-icon-filter);height:1.25rem;transform:rotate(180deg);transition:transform var(--ifm-transition-fast);width:1.25rem}.tocCollapsibleButtonExpanded_MG3E:after,.tocCollapsibleExpanded_sAul{transform:none}.tocCollapsible_ETCw{background-color:var(--ifm-menu-color-background-active);border-radius:var(--ifm-global-radius);margin:1rem 0}.tocCollapsibleContent_vkbj>ul{border-left:none;border-top:1px solid var(--ifm-color-emphasis-300);font-size:15px;padding:.2rem 0}.tocCollapsibleContent_vkbj ul li{margin:.4rem .8rem}.wordWrapButtonIcon_Bwma{height:1.2rem;width:1.2rem}.details_lb9f{--docusaurus-details-summary-arrow-size:0.38rem;--docusaurus-details-transition:transform 200ms ease;--docusaurus-details-decoration-color:grey}.details_lb9f>summary{cursor:pointer;padding-left:1rem;position:relative}.details_lb9f>summary::-webkit-details-marker{display:none}.details_lb9f>summary:before{border-color:#0000 #0000 #0000 var(--docusaurus-details-decoration-color);border-style:solid;border-width:var(--docusaurus-details-summary-arrow-size);content:"";left:0;position:absolute;top:.45rem;transform:rotate(0);transform-origin:calc(var(--docusaurus-details-summary-arrow-size)/2) 50%;transition:var(--docusaurus-details-transition)}.collapsibleContent_i85q{border-top:1px solid 
var(--docusaurus-details-decoration-color);margin-top:1rem;padding-top:1rem}.details_b_Ee{--docusaurus-details-decoration-color:var(--ifm-alert-border-color);--docusaurus-details-transition:transform var(--ifm-transition-fast) ease;border:1px solid var(--ifm-alert-border-color);margin:0 0 var(--ifm-spacing-vertical)}.anchorWithStickyNavbar_LWe7{scroll-margin-top:calc(var(--ifm-navbar-height) + .5rem)}.anchorWithHideOnScrollNavbar_WYt5{scroll-margin-top:.5rem}.hash-link{opacity:0;padding-left:.5rem;transition:opacity var(--ifm-transition-fast);-webkit-user-select:none;user-select:none}.hash-link:before{content:"#"}.img_ev3q{height:auto}.admonition_LlT9{margin-bottom:1em}.admonitionHeading_tbUL{font:var(--ifm-heading-font-weight) var(--ifm-h5-font-size)/var(--ifm-heading-line-height) var(--ifm-heading-font-family);margin-bottom:.3rem}.admonitionHeading_tbUL code{text-transform:none}.admonitionIcon_kALy{display:inline-block;margin-right:.4em;vertical-align:middle}.admonitionIcon_kALy svg{fill:var(--ifm-alert-foreground-color);display:inline-block;height:1.6em;width:1.6em}.blogPostFooterDetailsFull_mRVl{flex-direction:column}.tableOfContents_bqdL{overflow-y:auto;position:sticky;top:calc(var(--ifm-navbar-height) + 1rem)}.breadcrumbHomeIcon_YNFT{height:1.1rem;position:relative;top:1px;vertical-align:top;width:1.1rem}.breadcrumbsContainer_Z_bl{--ifm-breadcrumb-size-multiplier:0.8;margin-bottom:.8rem}.mdxPageWrapper_j9I6{justify-content:center}@media (min-width:997px){.collapseSidebarButton_PEFL,.expandButton_m80_{background-color:var(--docusaurus-collapse-button-bg)}:root{--docusaurus-announcement-bar-height:30px}.announcementBarClose_gvF7,.announcementBarPlaceholder_vyr4{flex-basis:50px}.searchBox_ZlJk{padding:var(--ifm-navbar-item-padding-vertical) var(--ifm-navbar-item-padding-horizontal)}.collapseSidebarButton_PEFL{border:1px solid var(--ifm-toc-border-color);border-radius:0;bottom:0;display:block!important;height:40px;position:sticky}.collapseSidebarButtonIcon_kv0_{margin-top:4px;transform:rotate(180deg)}.expandButtonIcon_BlDH,[dir=rtl] .collapseSidebarButtonIcon_kv0_{transform:rotate(0)}.collapseSidebarButton_PEFL:focus,.collapseSidebarButton_PEFL:hover,.expandButton_m80_:focus,.expandButton_m80_:hover{background-color:var(--docusaurus-collapse-button-bg-hover)}.menuHtmlItem_M9Kj{padding:var(--ifm-menu-link-padding-vertical) var(--ifm-menu-link-padding-horizontal)}.menu_SIkG{flex-grow:1;padding:.5rem}@supports (scrollbar-gutter:stable){.menu_SIkG{padding:.5rem 0 .5rem .5rem;scrollbar-gutter:stable}}.menuWithAnnouncementBar_GW3s{margin-bottom:var(--docusaurus-announcement-bar-height)}.sidebar_njMd{display:flex;flex-direction:column;height:100%;padding-top:var(--ifm-navbar-height);width:var(--doc-sidebar-width)}.sidebarWithHideableNavbar_wUlq{padding-top:0}.sidebarHidden_VK0M{opacity:0;visibility:hidden}.sidebarLogo_isFc{align-items:center;color:inherit!important;display:flex!important;margin:0 var(--ifm-navbar-padding-horizontal);max-height:var(--ifm-navbar-height);min-height:var(--ifm-navbar-height);text-decoration:none!important}.sidebarLogo_isFc img{height:2rem;margin-right:.5rem}.expandButton_m80_{align-items:center;display:flex;height:100%;justify-content:center;position:absolute;right:0;top:0;transition:background-color var(--ifm-transition-fast) ease;width:100%}[dir=rtl] .expandButtonIcon_BlDH{transform:rotate(180deg)}.docSidebarContainer_b6E3{border-right:1px solid 
var(--ifm-toc-border-color);-webkit-clip-path:inset(0);clip-path:inset(0);display:block;margin-top:calc(var(--ifm-navbar-height)*-1);transition:width var(--ifm-transition-fast) ease;width:var(--doc-sidebar-width);will-change:width}.docSidebarContainerHidden_b3ry{cursor:pointer;width:var(--doc-sidebar-hidden-width)}.sidebarViewport_Xe31{height:100%;max-height:100vh;position:sticky;top:0}.docMainContainer_gTbr{flex-grow:1;max-width:calc(100% - var(--doc-sidebar-width))}.docMainContainerEnhanced_Uz_u{max-width:calc(100% - var(--doc-sidebar-hidden-width))}.docItemWrapperEnhanced_czyv{max-width:calc(var(--ifm-container-width) + var(--doc-sidebar-width))!important}.lastUpdated_vwxv{text-align:right}.tocMobile_ITEo{display:none}.docItemCol_VOVn{max-width:75%!important}}@media (min-width:1440px){.container{max-width:var(--ifm-container-width-xl)}}@media (max-width:996px){.col{--ifm-col-width:100%;flex-basis:var(--ifm-col-width);margin-left:0}.footer{--ifm-footer-padding-horizontal:0}.colorModeToggle_DEke,.footer__link-separator,.navbar__item,.sidebar_re4s,.tableOfContents_bqdL{display:none}.footer__col{margin-bottom:calc(var(--ifm-spacing-vertical)*3)}.footer__link-item{display:block}.hero{padding-left:0;padding-right:0}.navbar>.container,.navbar>.container-fluid{padding:0}.navbar__toggle{display:inherit}.navbar__search-input{width:9rem}.pills--block,.tabs--block{flex-direction:column}.searchBox_ZlJk{position:absolute;right:var(--ifm-navbar-padding-horizontal)}.docItemContainer_F8PC{padding:0 .3rem}}@media not (max-width:996px){.searchBar_RVTs.searchBarLeft_MXDe .dropdownMenu_qbY6{left:0!important;right:auto!important}}@media only screen and (max-width:996px){.searchQueryColumn_q7nx{max-width:60%!important}.searchContextColumn_oWAF{max-width:40%!important}}@media screen and (max-width:996px){.heroBanner_qdFl{padding:2rem}}@media (max-width:576px){.markdown h1:first-child{--ifm-h1-font-size:2rem}.markdown>h2{--ifm-h2-font-size:1.5rem}.markdown>h3{--ifm-h3-font-size:1.25rem}.navbar__search-input:not(:focus){width:2rem}.searchBar_RVTs .dropdownMenu_qbY6{max-width:calc(100vw - var(--ifm-navbar-padding-horizontal)*2);width:var(--search-local-modal-width-sm,340px)}.searchBarContainer_NW3z:not(.focused_OWtg) .searchClearButton_qk4g,.searchHintContainer_Pkmr{display:none}.title_f1Hy{font-size:2rem}}@media screen and (max-width:576px){.searchQueryColumn_q7nx{max-width:100%!important}.searchContextColumn_oWAF{max-width:100%!important;padding-left:var(--ifm-spacing-horizontal)!important}}@media (hover:hover){.backToTopButton_sjWU:hover{background-color:var(--ifm-color-emphasis-300)}}@media (pointer:fine){.thin-scrollbar{scrollbar-width:thin}.thin-scrollbar::-webkit-scrollbar{height:var(--ifm-scrollbar-size);width:var(--ifm-scrollbar-size)}.thin-scrollbar::-webkit-scrollbar-track{background:var(--ifm-scrollbar-track-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb{background:var(--ifm-scrollbar-thumb-background-color);border-radius:10px}.thin-scrollbar::-webkit-scrollbar-thumb:hover{background:var(--ifm-scrollbar-thumb-hover-background-color)}}@media (prefers-reduced-motion:reduce){:root{--ifm-transition-fast:0ms;--ifm-transition-slow:0ms}}@media print{.announcementBar_mb4j,.footer,.menu,.navbar,.pagination-nav,.table-of-contents,.tocMobile_ITEo{display:none}.tabs{page-break-inside:avoid}.codeBlockLines_e6Vv{white-space:pre-wrap}} \ No newline at end of file diff --git "a/blog/PyTroch\345\237\272\347\241\200/index.html" "b/blog/PyTroch\345\237\272\347\241\200/index.html" index 
1d06d32a3..d6e06a61a 100644 --- "a/blog/PyTroch\345\237\272\347\241\200/index.html" +++ "b/blog/PyTroch\345\237\272\347\241\200/index.html" @@ -9,7 +9,7 @@ - + diff --git a/blog/archive/index.html b/blog/archive/index.html index a7a83bdb2..3252ad021 100644 --- a/blog/archive/index.html +++ b/blog/archive/index.html @@ -9,7 +9,7 @@ - + diff --git "a/blog/deep_learning/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" "b/blog/deep_learning/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" index 3455bcf5e..11a23fb2d 100644 --- "a/blog/deep_learning/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" +++ "b/blog/deep_learning/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" @@ -9,7 +9,7 @@ - + diff --git a/blog/index.html b/blog/index.html index 9548f06a1..22a7d14d1 100644 --- a/blog/index.html +++ b/blog/index.html @@ -9,7 +9,7 @@ - + diff --git "a/blog/\346\225\260\345\255\246\345\237\272\347\241\200/index.html" "b/blog/\346\225\260\345\255\246\345\237\272\347\241\200/index.html" index e0db9e411..88cddc296 100644 --- "a/blog/\346\225\260\345\255\246\345\237\272\347\241\200/index.html" +++ "b/blog/\346\225\260\345\255\246\345\237\272\347\241\200/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/blog/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" "b/blog/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" index ff07a9891..15d678794 100644 --- "a/blog/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" +++ "b/blog/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/blog/\347\220\206\350\256\272\347\237\245\350\257\206/index.html" "b/blog/\347\220\206\350\256\272\347\237\245\350\257\206/index.html" index e46eaafa5..2bfabbae5 100644 --- "a/blog/\347\220\206\350\256\272\347\237\245\350\257\206/index.html" +++ "b/blog/\347\220\206\350\256\272\347\237\245\350\257\206/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Algorithms/STL\346\250\241\346\235\277/index.html" "b/docs/Algorithms/STL\346\250\241\346\235\277/index.html" index 8a7f81d37..07946990d 100644 --- "a/docs/Algorithms/STL\346\250\241\346\235\277/index.html" +++ "b/docs/Algorithms/STL\346\250\241\346\235\277/index.html" @@ -9,7 +9,7 @@ - + diff --git a/docs/Algorithms/intro/index.html b/docs/Algorithms/intro/index.html index c01f9b78a..5eb905537 100644 --- a/docs/Algorithms/intro/index.html +++ b/docs/Algorithms/intro/index.html @@ -9,7 +9,7 @@ - + diff --git "a/docs/Algorithms/\346\234\272\350\257\225\346\212\200\345\267\247\344\270\216STL/index.html" "b/docs/Algorithms/\346\234\272\350\257\225\346\212\200\345\267\247\344\270\216STL/index.html" index e429497fd..c8a5a14a1 100644 --- "a/docs/Algorithms/\346\234\272\350\257\225\346\212\200\345\267\247\344\270\216STL/index.html" +++ "b/docs/Algorithms/\346\234\272\350\257\225\346\212\200\345\267\247\344\270\216STL/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Algorithms/\351\242\230\350\247\243/\344\270\200\347\273\264\345\211\215\347\274\200\345\222\214\357\274\210\345\210\267\345\207\272\344\270\200\351\201\223\345\242\231\357\274\211/index.html" 
"b/docs/Algorithms/\351\242\230\350\247\243/\344\270\200\347\273\264\345\211\215\347\274\200\345\222\214\357\274\210\345\210\267\345\207\272\344\270\200\351\201\223\345\242\231\357\274\211/index.html" index 62956d529..fdc379526 100644 --- "a/docs/Algorithms/\351\242\230\350\247\243/\344\270\200\347\273\264\345\211\215\347\274\200\345\222\214\357\274\210\345\210\267\345\207\272\344\270\200\351\201\223\345\242\231\357\274\211/index.html" +++ "b/docs/Algorithms/\351\242\230\350\247\243/\344\270\200\347\273\264\345\211\215\347\274\200\345\222\214\357\274\210\345\210\267\345\207\272\344\270\200\351\201\223\345\242\231\357\274\211/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Algorithms/\351\242\230\350\247\243/\345\217\215\345\272\217\350\276\223\345\207\272/index.html" "b/docs/Algorithms/\351\242\230\350\247\243/\345\217\215\345\272\217\350\276\223\345\207\272/index.html" index 7fd0cad97..549d1dda3 100644 --- "a/docs/Algorithms/\351\242\230\350\247\243/\345\217\215\345\272\217\350\276\223\345\207\272/index.html" +++ "b/docs/Algorithms/\351\242\230\350\247\243/\345\217\215\345\272\217\350\276\223\345\207\272/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Algorithms/\351\242\230\350\247\243/\346\216\222\345\210\227\347\273\204\345\220\210\357\274\210\346\261\20230\347\232\204\345\200\215\346\225\260\357\274\211/index.html" "b/docs/Algorithms/\351\242\230\350\247\243/\346\216\222\345\210\227\347\273\204\345\220\210\357\274\210\346\261\20230\347\232\204\345\200\215\346\225\260\357\274\211/index.html" index 868bc4172..a68770560 100644 --- "a/docs/Algorithms/\351\242\230\350\247\243/\346\216\222\345\210\227\347\273\204\345\220\210\357\274\210\346\261\20230\347\232\204\345\200\215\346\225\260\357\274\211/index.html" +++ "b/docs/Algorithms/\351\242\230\350\247\243/\346\216\222\345\210\227\347\273\204\345\220\210\357\274\210\346\261\20230\347\232\204\345\200\215\346\225\260\357\274\211/index.html" @@ -9,7 +9,7 @@ - + diff --git a/docs/Deep Learning/intro/index.html b/docs/Deep Learning/intro/index.html index 88ae24c56..31fae61a7 100644 --- a/docs/Deep Learning/intro/index.html +++ b/docs/Deep Learning/intro/index.html @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/K-fold Cross-validation/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/K-fold Cross-validation/index.html" index 27ef4fdf0..069183f38 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/K-fold Cross-validation/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/K-fold Cross-validation/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/Logistic Regression/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/Logistic Regression/index.html" index eb51987ec..10679ca07 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/Logistic Regression/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/Logistic Regression/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/PyTroch\345\237\272\347\241\200/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/PyTroch\345\237\272\347\241\200/index.html" index 3365d01c2..2bf0f9a8a 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/PyTroch\345\237\272\347\241\200/index.html" 
+++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/PyTroch\345\237\272\347\241\200/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\344\273\216\345\205\250\350\277\236\346\216\245\345\210\260\345\215\267\347\247\257/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\344\273\216\345\205\250\350\277\236\346\216\245\345\210\260\345\215\267\347\247\257/index.html" index 1e3186f53..d14bd6e60 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\344\273\216\345\205\250\350\277\236\346\216\245\345\210\260\345\215\267\347\247\257/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\344\273\216\345\205\250\350\277\236\346\216\245\345\210\260\345\215\267\347\247\257/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\215\267\347\247\257\345\261\202/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\215\267\347\247\257\345\261\202/index.html" index 995c67849..1a6d599b3 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\215\267\347\247\257\345\261\202/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\215\267\347\247\257\345\261\202/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\257\271\344\272\216\346\255\243\345\210\231\345\214\226\347\232\204\347\220\206\350\247\243/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\257\271\344\272\216\346\255\243\345\210\231\345\214\226\347\232\204\347\220\206\350\247\243/index.html" index 4ede922ea..ed7b63909 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\257\271\344\272\216\346\255\243\345\210\231\345\214\226\347\232\204\347\220\206\350\247\243/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\345\257\271\344\272\216\346\255\243\345\210\231\345\214\226\347\232\204\347\220\206\350\247\243/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\255\243\345\210\231\345\214\226\344\270\216\346\235\203\351\207\215\350\241\260\351\200\200/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\255\243\345\210\231\345\214\226\344\270\216\346\235\203\351\207\215\350\241\260\351\200\200/index.html" index 235352b86..4240af58a 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\255\243\345\210\231\345\214\226\344\270\216\346\235\203\351\207\215\350\241\260\351\200\200/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\255\243\345\210\231\345\214\226\344\270\216\346\235\203\351\207\215\350\241\260\351\200\200/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\261\240\345\214\226\345\261\202/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\261\240\345\214\226\345\261\202/index.html" index ce8505ef4..3e15e0eee 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\261\240\345\214\226\345\261\202/index.html" +++ "b/docs/Deep 
Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\261\240\345\214\226\345\261\202/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\267\261\345\272\246\345\217\257\345\210\206\347\246\273\345\215\267\347\247\257/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\267\261\345\272\246\345\217\257\345\210\206\347\246\273\345\215\267\347\247\257/index.html" index 0ebd67c8e..bd43688b8 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\267\261\345\272\246\345\217\257\345\210\206\347\246\273\345\215\267\347\247\257/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\267\261\345\272\246\345\217\257\345\210\206\347\246\273\345\215\267\347\247\257/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" index 415a50115..0e2fa9fba 100644 --- "a/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" +++ "b/docs/Deep Learning/\345\237\272\347\241\200\347\237\245\350\257\206/\346\277\200\346\264\273\345\207\275\346\225\260\344\270\216Loss\347\232\204\346\242\257\345\272\246/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\345\256\236\347\224\250\346\212\200\345\267\247/Visdom\345\217\257\350\247\206\345\214\226/index.html" "b/docs/Deep Learning/\345\256\236\347\224\250\346\212\200\345\267\247/Visdom\345\217\257\350\247\206\345\214\226/index.html" index 06e11c564..714e12f11 100644 --- "a/docs/Deep Learning/\345\256\236\347\224\250\346\212\200\345\267\247/Visdom\345\217\257\350\247\206\345\214\226/index.html" +++ "b/docs/Deep Learning/\345\256\236\347\224\250\346\212\200\345\267\247/Visdom\345\217\257\350\247\206\345\214\226/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/AlexNet/index.html" "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/AlexNet/index.html" index 8ad75e9cc..e40ee89f9 100644 --- "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/AlexNet/index.html" +++ "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/AlexNet/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/LeNet/index.html" "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/LeNet/index.html" index 03ce22a67..16e86d508 100644 --- "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/LeNet/index.html" +++ "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/LeNet/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/Perceptron/index.html" "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/Perceptron/index.html" index 3e77634c1..7c3043761 100644 --- "a/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/Perceptron/index.html" +++ "b/docs/Deep Learning/\347\273\217\345\205\270\346\250\241\345\236\213/Perceptron/index.html" 
@@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Attention Is All You Need/index.html" "b/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Attention Is All You Need/index.html" index 3a2080b31..d8dcb949c 100644 --- "a/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Attention Is All You Need/index.html" +++ "b/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Attention Is All You Need/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Self-Attention/index.html" "b/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Self-Attention/index.html" index 302657a0d..37de6f3a4 100644 --- "a/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Self-Attention/index.html" +++ "b/docs/Deep Learning/\350\256\272\346\226\207\347\254\224\350\256\260/Self-Attention/index.html" @@ -9,7 +9,7 @@ - + diff --git a/docs/Linux/intro/index.html b/docs/Linux/intro/index.html index f8611bf9a..1eb97469e 100644 --- a/docs/Linux/intro/index.html +++ b/docs/Linux/intro/index.html @@ -9,7 +9,7 @@ - + diff --git "a/docs/Linux/\345\256\236\347\224\250\345\267\245\345\205\267/\347\273\210\347\253\257\344\273\243\347\220\206/index.html" "b/docs/Linux/\345\256\236\347\224\250\345\267\245\345\205\267/\347\273\210\347\253\257\344\273\243\347\220\206/index.html" index 94f6675fe..91b204b5d 100644 --- "a/docs/Linux/\345\256\236\347\224\250\345\267\245\345\205\267/\347\273\210\347\253\257\344\273\243\347\220\206/index.html" +++ "b/docs/Linux/\345\256\236\347\224\250\345\267\245\345\205\267/\347\273\210\347\253\257\344\273\243\347\220\206/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Linux/\345\256\242\345\210\266\345\214\226/\345\246\202\344\275\225\350\256\251\344\275\240\347\232\204KDE\347\234\213\350\265\267\346\235\245\346\233\264\345\203\217macOS/index.html" "b/docs/Linux/\345\256\242\345\210\266\345\214\226/\345\246\202\344\275\225\350\256\251\344\275\240\347\232\204KDE\347\234\213\350\265\267\346\235\245\346\233\264\345\203\217macOS/index.html" index ce538d2b9..74fa09dcc 100644 --- "a/docs/Linux/\345\256\242\345\210\266\345\214\226/\345\246\202\344\275\225\350\256\251\344\275\240\347\232\204KDE\347\234\213\350\265\267\346\235\245\346\233\264\345\203\217macOS/index.html" +++ "b/docs/Linux/\345\256\242\345\210\266\345\214\226/\345\246\202\344\275\225\350\256\251\344\275\240\347\232\204KDE\347\234\213\350\265\267\346\235\245\346\233\264\345\203\217macOS/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Linux/\351\227\256\351\242\230\350\247\243\345\206\263/\345\217\214\347\263\273\347\273\237\346\214\202\350\275\275Windows\347\243\201\347\233\230\344\270\272\345\217\252\350\257\273\346\226\207\344\273\266/index.html" "b/docs/Linux/\351\227\256\351\242\230\350\247\243\345\206\263/\345\217\214\347\263\273\347\273\237\346\214\202\350\275\275Windows\347\243\201\347\233\230\344\270\272\345\217\252\350\257\273\346\226\207\344\273\266/index.html" index a0244d3b5..ddcd33145 100644 --- "a/docs/Linux/\351\227\256\351\242\230\350\247\243\345\206\263/\345\217\214\347\263\273\347\273\237\346\214\202\350\275\275Windows\347\243\201\347\233\230\344\270\272\345\217\252\350\257\273\346\226\207\344\273\266/index.html" +++ 
"b/docs/Linux/\351\227\256\351\242\230\350\247\243\345\206\263/\345\217\214\347\263\273\347\273\237\346\214\202\350\275\275Windows\347\243\201\347\233\230\344\270\272\345\217\252\350\257\273\346\226\207\344\273\266/index.html" @@ -9,7 +9,7 @@ - + diff --git a/docs/Others/intro/index.html b/docs/Others/intro/index.html index ef0385603..99b5f2139 100644 --- a/docs/Others/intro/index.html +++ b/docs/Others/intro/index.html @@ -9,7 +9,7 @@ - + diff --git "a/docs/Others/\345\215\232\345\256\242\346\220\255\345\273\272/\345\221\212\347\244\272\346\240\217/index.html" "b/docs/Others/\345\215\232\345\256\242\346\220\255\345\273\272/\345\221\212\347\244\272\346\240\217/index.html" index 77853d553..201cf6ae7 100644 --- "a/docs/Others/\345\215\232\345\256\242\346\220\255\345\273\272/\345\221\212\347\244\272\346\240\217/index.html" +++ "b/docs/Others/\345\215\232\345\256\242\346\220\255\345\273\272/\345\221\212\347\244\272\346\240\217/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/Others/\351\235\242\350\257\225/\350\246\201\345\207\206\345\244\207\347\232\204\351\227\256\351\242\230/index.html" "b/docs/Others/\351\235\242\350\257\225/\350\246\201\345\207\206\345\244\207\347\232\204\351\227\256\351\242\230/index.html" index 5cc6e186a..aae15dd42 100644 --- "a/docs/Others/\351\235\242\350\257\225/\350\246\201\345\207\206\345\244\207\347\232\204\351\227\256\351\242\230/index.html" +++ "b/docs/Others/\351\235\242\350\257\225/\350\246\201\345\207\206\345\244\207\347\232\204\351\227\256\351\242\230/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/intro/index.html" "b/docs/\346\216\250\345\205\215/intro/index.html" index 24853ce9f..de6939966 100644 --- "a/docs/\346\216\250\345\205\215/intro/index.html" +++ "b/docs/\346\216\250\345\205\215/intro/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\345\244\217\344\273\244\350\220\245\351\235\242\350\257\225\346\225\260\345\255\246\351\203\250\345\210\206\345\244\215\344\271\240/index.html" "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\345\244\217\344\273\244\350\220\245\351\235\242\350\257\225\346\225\260\345\255\246\351\203\250\345\210\206\345\244\215\344\271\240/index.html" index 805509a0f..d3d9e3318 100644 --- "a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\345\244\217\344\273\244\350\220\245\351\235\242\350\257\225\346\225\260\345\255\246\351\203\250\345\210\206\345\244\215\344\271\240/index.html" +++ "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\345\244\217\344\273\244\350\220\245\351\235\242\350\257\225\346\225\260\345\255\246\351\203\250\345\210\206\345\244\215\344\271\240/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\346\246\202\347\216\207\350\256\272/index.html" "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\346\246\202\347\216\207\350\256\272/index.html" index 94163b445..2e9858f9f 100644 --- "a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\346\246\202\347\216\207\350\256\272/index.html" +++ "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\346\246\202\347\216\207\350\256\272/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\347\272\277\346\200\247\344\273\243\346\225\260/index.html" "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\347\272\277\346\200\247\344\273\243\346\225\260/index.html" index 66c7b932c..b3a23b7a4 100644 --- 
"a/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\347\272\277\346\200\247\344\273\243\346\225\260/index.html" +++ "b/docs/\346\216\250\345\205\215/\346\225\260\345\255\246/\347\272\277\346\200\247\344\273\243\346\225\260/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\346\234\272\350\257\225/\345\244\247\346\225\260\351\231\244\346\263\225/index.html" "b/docs/\346\216\250\345\205\215/\346\234\272\350\257\225/\345\244\247\346\225\260\351\231\244\346\263\225/index.html" index 46e49b2d8..d07c4e459 100644 --- "a/docs/\346\216\250\345\205\215/\346\234\272\350\257\225/\345\244\247\346\225\260\351\231\244\346\263\225/index.html" +++ "b/docs/\346\216\250\345\205\215/\346\234\272\350\257\225/\345\244\247\346\225\260\351\231\244\346\263\225/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\347\256\200\345\216\206/\347\256\200\345\216\206\351\235\242\350\257\225\345\207\206\345\244\207/index.html" "b/docs/\346\216\250\345\205\215/\347\256\200\345\216\206/\347\256\200\345\216\206\351\235\242\350\257\225\345\207\206\345\244\207/index.html" index fed958a49..bb4dd810f 100644 --- "a/docs/\346\216\250\345\205\215/\347\256\200\345\216\206/\347\256\200\345\216\206\351\235\242\350\257\225\345\207\206\345\244\207/index.html" +++ "b/docs/\346\216\250\345\205\215/\347\256\200\345\216\206/\347\256\200\345\216\206\351\235\242\350\257\225\345\207\206\345\244\207/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\346\216\250\345\205\215/\350\256\241\347\256\227\346\234\272\345\237\272\347\241\200\347\273\274\345\220\210/\346\225\260\346\215\256\347\273\223\346\236\204/index.html" "b/docs/\346\216\250\345\205\215/\350\256\241\347\256\227\346\234\272\345\237\272\347\241\200\347\273\274\345\220\210/\346\225\260\346\215\256\347\273\223\346\236\204/index.html" index 6a01de418..de39a0b9b 100644 --- "a/docs/\346\216\250\345\205\215/\350\256\241\347\256\227\346\234\272\345\237\272\347\241\200\347\273\274\345\220\210/\346\225\260\346\215\256\347\273\223\346\236\204/index.html" +++ "b/docs/\346\216\250\345\205\215/\350\256\241\347\256\227\346\234\272\345\237\272\347\241\200\347\273\274\345\220\210/\346\225\260\346\215\256\347\273\223\346\236\204/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/intro/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/intro/index.html" index 74bbeb504..76ff9537a 100644 --- "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/intro/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/intro/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/GeekOS project 0/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/GeekOS project 0/index.html" index 2c536ee04..64ecfd344 100644 --- "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/GeekOS project 0/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/GeekOS project 0/index.html" @@ -9,7 +9,7 @@ - + diff --git 
"a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/Linux\347\263\273\347\273\237\344\270\213GeekOS\347\232\204\347\216\257\345\242\203\351\205\215\347\275\256/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/Linux\347\263\273\347\273\237\344\270\213GeekOS\347\232\204\347\216\257\345\242\203\351\205\215\347\275\256/index.html" index 7738f99ee..b82dae32e 100644 --- "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/Linux\347\263\273\347\273\237\344\270\213GeekOS\347\232\204\347\216\257\345\242\203\351\205\215\347\275\256/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\346\223\215\344\275\234\347\263\273\347\273\237\350\257\276\350\256\276/Linux\347\263\273\347\273\237\344\270\213GeekOS\347\232\204\347\216\257\345\242\203\351\205\215\347\275\256/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\347\274\226\350\257\221\345\216\237\347\220\206/\347\274\226\350\257\221\345\216\237\347\220\206\345\244\215\344\271\240\347\254\224\350\256\260/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\347\274\226\350\257\221\345\216\237\347\220\206/\347\274\226\350\257\221\345\216\237\347\220\206\345\244\215\344\271\240\347\254\224\350\256\260/index.html" index 662b885cd..7f4c48135 100644 --- "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\347\274\226\350\257\221\345\216\237\347\220\206/\347\274\226\350\257\221\345\216\237\347\220\206\345\244\215\344\271\240\347\254\224\350\256\260/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\347\274\226\350\257\221\345\216\237\347\220\206/\347\274\226\350\257\221\345\216\237\347\220\206\345\244\215\344\271\240\347\254\224\350\256\260/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\344\275\223\347\263\273\347\273\223\346\236\204/\344\275\223\347\263\273\347\273\223\346\236\204\345\244\215\344\271\240\347\254\224\350\256\260/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\344\275\223\347\263\273\347\273\223\346\236\204/\344\275\223\347\263\273\347\273\223\346\236\204\345\244\215\344\271\240\347\254\224\350\256\260/index.html" index 0517aeed5..0d9e4bad1 100644 --- "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\344\275\223\347\263\273\347\273\223\346\236\204/\344\275\223\347\263\273\347\273\223\346\236\204\345\244\215\344\271\240\347\254\224\350\256\260/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\344\275\223\347\263\273\347\273\223\346\236\204/\344\275\223\347\263\273\347\273\223\346\236\204\345\244\215\344\271\240\347\254\224\350\256\260/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\345\233\276\345\275\242\345\255\246/Transformer and self-attention/index.html" "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\345\233\276\345\275\242\345\255\246/Transformer and self-attention/index.html" index 9dbce2eaa..a1d80459a 100644 --- 
"a/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\345\233\276\345\275\242\345\255\246/Transformer and self-attention/index.html" +++ "b/docs/\350\257\276\347\250\213\345\255\246\344\271\240/\350\256\241\347\256\227\346\234\272\345\233\276\345\275\242\345\255\246/Transformer and self-attention/index.html" @@ -9,7 +9,7 @@ - + diff --git "a/docs/\351\270\243\350\260\242/intro/index.html" "b/docs/\351\270\243\350\260\242/intro/index.html" index 0f80ed58e..df70884df 100644 --- "a/docs/\351\270\243\350\260\242/intro/index.html" +++ "b/docs/\351\270\243\350\260\242/intro/index.html" @@ -9,7 +9,7 @@ - + diff --git a/img/apple.svg b/img/apple.svg new file mode 100644 index 000000000..23cb0c659 --- /dev/null +++ b/img/apple.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/img/avengers.svg b/img/avengers.svg deleted file mode 100644 index 8dac54ed4..000000000 --- a/img/avengers.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/index.html b/index.html index e19280787..dbf244c4e 100644 --- a/index.html +++ b/index.html @@ -9,7 +9,7 @@ - + diff --git a/markdown-page/index.html b/markdown-page/index.html index 5068d3cf5..d3515919a 100644 --- a/markdown-page/index.html +++ b/markdown-page/index.html @@ -9,7 +9,7 @@ - + diff --git a/search-index.json b/search-index.json index b13d725c0..8ec56ffdb 100644 --- a/search-index.json +++ b/search-index.json @@ -1 +1 @@ -[{"documents":[{"i":1,"t":"","u":"/blog/archive","b":[]},{"i":2,"t":"激活函数与Loss的梯度","u":"/blog/激活函数与Loss的梯度","b":[]},{"i":8,"t":"理论基础","u":"/blog/理论知识","b":[]},{"i":10,"t":"PyTorch基础","u":"/blog/PyTroch基础","b":[]},{"i":14,"t":"基础数学知识","u":"/blog/数学基础","b":[]},{"i":18,"t":"激活函数与Loss的梯度","u":"/blog/deep_learning/激活函数与Loss的梯度","b":[]},{"i":24,"t":"编译原理笔记","u":"/docs/课程学习/编译原理/编译原理复习笔记","b":["编译原理"]},{"i":82,"t":"Linux系统下GeekOS的环境配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","b":["操作系统课设"]},{"i":96,"t":"体系结构复习笔记","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","b":["计算机体系结构"]},{"i":104,"t":"Transformer and self-attention","u":"/docs/课程学习/计算机图形学/Transformer and self-attention","b":["计算机图形学"]},{"i":107,"t":"GeekOS project 0的实现","u":"/docs/课程学习/操作系统课设/GeekOS project 0","b":["操作系统课设"]},{"i":113,"t":"Welcome","u":"/docs/课程学习/intro","b":["课程学习"]},{"i":117,"t":"鸣谢","u":"/docs/鸣谢/intro","b":["饮水思源"]},{"i":119,"t":"大数除法","u":"/docs/推免/机试/大数除法","b":["机试"]},{"i":127,"t":"数据结构","u":"/docs/推免/计算机基础综合/数据结构","b":["计算机基础综合"]},{"i":132,"t":"概率论","u":"/docs/推免/数学/概率论","b":["数学"]},{"i":135,"t":"简历面试准备","u":"/docs/推免/简历/简历面试准备","b":["简历"]},{"i":177,"t":"夏令营面试数学部分复习","u":"/docs/推免/数学/夏令营面试数学部分复习","b":["数学"]},{"i":182,"t":"Welcome","u":"/docs/推免/intro","b":["推免"]},{"i":186,"t":"线性代数","u":"/docs/推免/数学/线性代数","b":["数学"]},{"i":192,"t":"反序输出","u":"/docs/Algorithms/题解/反序输出","b":["题解"]},{"i":198,"t":"Welcome","u":"/docs/Algorithms/intro","b":["算法"]},{"i":202,"t":"一维前缀和(刷出一道墙)","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","b":["题解"]},{"i":208,"t":"排列组合(求30的倍数)","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","b":["题解"]},{"i":214,"t":"机试技巧与STL","u":"/docs/Algorithms/机试技巧与STL","b":[]},{"i":281,"t":"从全连接到卷积","u":"/docs/Deep Learning/基础知识/从全连接到卷积","b":["基础知识"]},{"i":288,"t":"STL模板","u":"/docs/Algorithms/STL模板","b":[]},{"i":310,"t":"池化层","u":"/docs/Deep Learning/基础知识/池化层","b":["基础知识"]},{"i":317,"t":"卷积层","u":"/docs/Deep Learning/基础知识/卷积层","b":["基础知识"]},{"i":322,"t":"激活函数与Loss的梯度","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","b":["基础知识"]},{"i":333,"t":"对于正则化的理解","u":"/docs/Deep 
Learning/基础知识/对于正则化的理解","b":["基础知识"]},{"i":349,"t":"深度可分离卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","b":["基础知识"]},{"i":357,"t":"正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","b":["基础知识"]},{"i":365,"t":"K-fold cross-validation","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","b":["基础知识"]},{"i":372,"t":"AlexNet","u":"/docs/Deep Learning/经典模型/AlexNet","b":["经典模型"]},{"i":379,"t":"LeNet","u":"/docs/Deep Learning/经典模型/LeNet","b":["经典模型"]},{"i":386,"t":"关于Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","b":["基础知识"]},{"i":395,"t":"PyTorch基础","u":"/docs/Deep Learning/基础知识/PyTroch基础","b":["基础知识"]},{"i":398,"t":"Perceptron","u":"/docs/Deep Learning/经典模型/Perceptron","b":["经典模型"]},{"i":405,"t":"Attention Is All You Need","u":"/docs/Deep Learning/论文笔记/Attention Is All You Need","b":["论文笔记"]},{"i":406,"t":"Self-Attention","u":"/docs/Deep Learning/论文笔记/Self-Attention","b":["论文笔记"]},{"i":412,"t":"Welcome","u":"/docs/Deep Learning/intro","b":["深度学习"]},{"i":416,"t":"Visdom可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","b":["实用技巧"]},{"i":427,"t":"终端代理","u":"/docs/Linux/实用工具/终端代理","b":["实用工具"]},{"i":435,"t":"如何让你的Kde Plasma看起来更像macOS","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","b":["客制化"]},{"i":440,"t":"挂载Windows磁盘为只读文件","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","b":["问题解决"]},{"i":445,"t":"Welcome","u":"/docs/Others/intro","b":["其他"]},{"i":449,"t":"Welcome","u":"/docs/Linux/intro","b":["Linux"]},{"i":453,"t":"告示栏","u":"/docs/Others/博客搭建/告示栏","b":["博客搭建"]},{"i":455,"t":"要准备的问题","u":"/docs/Others/面试/要准备的问题","b":["面试"]}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/1",[]],["t/2",[0,2.875]],["t/8",[1,0.878]],["t/10",[2,3.236]],["t/14",[1,0.878]],["t/18",[0,2.875]],["t/24",[1,0.878]],["t/82",[3,3.784]],["t/96",[1,0.878]],["t/104",[4,2.185,5,1.869,6,1.66]],["t/107",[7,2.185,8,2.185,9,2.185]],["t/113",[10,2.211]],["t/117",[1,0.878]],["t/119",[1,0.878]],["t/127",[1,0.878]],["t/132",[1,0.878]],["t/135",[1,0.878]],["t/177",[1,0.878]],["t/182",[10,2.211]],["t/186",[1,0.878]],["t/192",[1,0.878]],["t/198",[10,2.211]],["t/202",[1,0.878]],["t/208",[11,3.784]],["t/214",[12,3.236]],["t/281",[1,0.878]],["t/288",[12,3.236]],["t/310",[1,0.878]],["t/317",[1,0.878]],["t/322",[0,2.875]],["t/333",[1,0.878]],["t/349",[1,0.878]],["t/357",[1,0.878]],["t/365",[13,1.804,14,1.804,15,1.804,16,1.804]],["t/372",[17,3.784]],["t/379",[18,3.784]],["t/386",[19,2.771,20,2.771]],["t/395",[2,3.236]],["t/398",[21,3.784]],["t/405",[6,2.105,22,2.771]],["t/406",[5,2.369,6,2.105]],["t/412",[10,2.211]],["t/416",[23,3.784]],["t/427",[1,0.878]],["t/435",[24,2.771,25,2.771]],["t/440",[26,3.784]],["t/445",[10,2.211]],["t/449",[10,2.211]],["t/453",[1,0.878]],["t/455",[1,0.878]]],"invertedIndex":[["",{"_index":1,"t":{"8":{"position":[[0,4]]},"14":{"position":[[0,6]]},"24":{"position":[[0,6]]},"96":{"position":[[0,8]]},"117":{"position":[[0,2]]},"119":{"position":[[0,4]]},"127":{"position":[[0,4]]},"132":{"position":[[0,3]]},"135":{"position":[[0,6]]},"177":{"position":[[0,11]]},"186":{"position":[[0,4]]},"192":{"position":[[0,4]]},"202":{"position":[[0,12]]},"281":{"position":[[0,7]]},"310":{"position":[[0,3]]},"317":{"position":[[0,3]]},"333":{"position":[[0,8]]},"349":{"position":[[0,7]]},"357":{"position":[[0,8]]},"427":{"position":[[0,4]]},"453":{"position":[[0,3]]},"455":{"position":[[0,6]]}}}],["0",{"_index":9,"t":{"107":{"position":[[15,4]]}}}],["30",{"_index":11,"t":{"208":{"position":[[0,12]]}}}],["alexnet",{"_index":17,"t":{"372":{"position":[[0,7]]}}}],["attent",{"_index":6,"t":{"104":{"position":[[21,
9]]},"405":{"position":[[0,9]]},"406":{"position":[[5,9]]}}}],["cross",{"_index":15,"t":{"365":{"position":[[7,5]]}}}],["fold",{"_index":14,"t":{"365":{"position":[[2,4]]}}}],["geeko",{"_index":7,"t":{"107":{"position":[[0,6]]}}}],["k",{"_index":13,"t":{"365":{"position":[[0,1]]}}}],["kde",{"_index":24,"t":{"435":{"position":[[0,8]]}}}],["lenet",{"_index":18,"t":{"379":{"position":[[0,5]]}}}],["linux系统下geeko",{"_index":3,"t":{"82":{"position":[[0,19]]}}}],["logist",{"_index":19,"t":{"386":{"position":[[0,10]]}}}],["loss",{"_index":0,"t":{"2":{"position":[[0,12]]},"18":{"position":[[0,12]]},"322":{"position":[[0,12]]}}}],["need",{"_index":22,"t":{"405":{"position":[[21,4]]}}}],["perceptron",{"_index":21,"t":{"398":{"position":[[0,10]]}}}],["plasma看起来更像maco",{"_index":25,"t":{"435":{"position":[[9,16]]}}}],["project",{"_index":8,"t":{"107":{"position":[[7,7]]}}}],["pytorch",{"_index":2,"t":{"10":{"position":[[0,9]]},"395":{"position":[[0,9]]}}}],["regress",{"_index":20,"t":{"386":{"position":[[11,10]]}}}],["self",{"_index":5,"t":{"104":{"position":[[16,4]]},"406":{"position":[[0,4]]}}}],["stl",{"_index":12,"t":{"214":{"position":[[0,8]]},"288":{"position":[[0,5]]}}}],["transform",{"_index":4,"t":{"104":{"position":[[0,11]]}}}],["valid",{"_index":16,"t":{"365":{"position":[[13,10]]}}}],["visdom",{"_index":23,"t":{"416":{"position":[[0,9]]}}}],["welcom",{"_index":10,"t":{"113":{"position":[[0,7]]},"182":{"position":[[0,7]]},"198":{"position":[[0,7]]},"412":{"position":[[0,7]]},"445":{"position":[[0,7]]},"449":{"position":[[0,7]]}}}],["window",{"_index":26,"t":{"440":{"position":[[0,16]]}}}]],"pipeline":["stemmer"]}},{"documents":[{"i":4,"t":"一、激活函数","u":"/blog/激活函数与Loss的梯度","h":"#一激活函数","p":2},{"i":6,"t":"二、损失函数","u":"/blog/激活函数与Loss的梯度","h":"#二损失函数","p":2},{"i":12,"t":"一、常用函数部分","u":"/blog/PyTroch基础","h":"#一常用函数部分","p":10},{"i":16,"t":"矩阵 / 向量的内积和外积","u":"/blog/数学基础","h":"#矩阵--向量的内积和外积","p":14},{"i":20,"t":"一、激活函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#一激活函数","p":18},{"i":22,"t":"二、损失函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#二损失函数","p":18},{"i":25,"t":"第一章:前言","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第一章前言","p":24},{"i":26,"t":"1.1 编译程序的逻辑结构","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#11-编译程序的逻辑结构","p":24},{"i":28,"t":"1.2 前端和后端","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#12-前端和后端","p":24},{"i":30,"t":"1.3 遍的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#13-遍的概念","p":24},{"i":32,"t":"第二章:文法和语言","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第二章文法和语言","p":24},{"i":33,"t":"2.1 句型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#21-句型","p":24},{"i":35,"t":"2.2 句子:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#22-句子","p":24},{"i":37,"t":"2.3 文法的分类:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#23-文法的分类","p":24},{"i":39,"t":"2.4 最左/右推导:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#24-最左右推导","p":24},{"i":41,"t":"第三章:词法分析","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第三章词法分析","p":24},{"i":42,"t":"3.1 正规文法转换成正规式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#31-正规文法转换成正规式","p":24},{"i":43,"t":"3.2 有穷自动机(FA)","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#32-有穷自动机fa","p":24},{"i":45,"t":"3.3 正规式RE与有穷自动机FA的互相转化","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#33-正规式re与有穷自动机fa的互相转化","p":24},{"i":46,"t":"3.4 正规文法RM与有穷自动机FA的互相转化","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#34-正规文法rm与有穷自动机fa的互相转化","p":24},{"i":47,"t":"第四章:自顶向下语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第四章自顶向下语法分析方法","p":24},{"i":49,"t":"1. FIRST集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-first集的定义","p":24},{"i":50,"t":"2. Follow集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-follow集的定义","p":24},{"i":52,"t":"3. 
SELECT集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-select集的定义","p":24},{"i":54,"t":"4. LL(1)文法的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-ll1文法的定义","p":24},{"i":55,"t":"5. LL(1)文法的判别","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-ll1文法的判别","p":24},{"i":57,"t":"6. 预测分析表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#6-预测分析表","p":24},{"i":59,"t":"7. 非LL(1)文法到LL(1)文法的等价变换","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#7-非ll1文法到ll1文法的等价变换","p":24},{"i":61,"t":"第五章:自底向上语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第五章自底向上语法分析方法","p":24},{"i":62,"t":"5.1 概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#51-概念","p":24},{"i":64,"t":"5.2 方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#52-方法","p":24},{"i":66,"t":"5.3 工作过程","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#53-工作过程","p":24},{"i":67,"t":"5.4 移入-归约分析器的4种动作","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#54-移入-归约分析器的4种动作","p":24},{"i":69,"t":"5.5 重要题型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#55-重要题型","p":24},{"i":71,"t":"概念总结","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#概念总结","p":24},{"i":72,"t":"1 编译程序各阶段功能","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-编译程序各阶段功能","p":24},{"i":74,"t":"2 语法分析方法的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-语法分析方法的概念","p":24},{"i":76,"t":"3 翻译模式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-翻译模式","p":24},{"i":78,"t":"4 属性文法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-属性文法","p":24},{"i":80,"t":"5 符号表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-符号表","p":24},{"i":83,"t":"一、必须要知道的原理","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#一必须要知道的原理","p":82},{"i":84,"t":"1. GeekOS:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-geekos","p":82},{"i":86,"t":"2. bochs:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-bochs","p":82},{"i":88,"t":"3. 二者之间的关系","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#3-二者之间的关系","p":82},{"i":90,"t":"二、安装与配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#二安装与配置","p":82},{"i":92,"t":"1. 安装","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-安装","p":82},{"i":94,"t":"2. 配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-配置","p":82},{"i":97,"t":"一、基础知识","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#一基础知识","p":96},{"i":99,"t":"二、指令系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#二指令系统","p":96},{"i":100,"t":"三、存储系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#三存储系统","p":96},{"i":102,"t":"四、流水线","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#四流水线","p":96},{"i":105,"t":"一、模型概述","u":"/docs/课程学习/计算机图形学/Transformer and self-attention","h":"#一模型概述","p":104},{"i":106,"t":"二、CNN与self-attention","u":"/docs/课程学习/计算机图形学/Transformer and self-attention","h":"#二cnn与self-attention","p":104},{"i":109,"t":"1. 编写C语言代码","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#1-编写c语言代码","p":107},{"i":111,"t":"2. 
使用Linux的编译系统对C语言代码进行编译","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#2-使用linux的编译系统对c语言代码进行编译","p":107},{"i":115,"t":"支持我!","u":"/docs/课程学习/intro","h":"#支持我","p":113},{"i":121,"t":"思路","u":"/docs/推免/机试/大数除法","h":"#思路","p":119},{"i":123,"t":"参考代码","u":"/docs/推免/机试/大数除法","h":"#参考代码","p":119},{"i":125,"t":"扩展","u":"/docs/推免/机试/大数除法","h":"#扩展","p":119},{"i":128,"t":"树","u":"/docs/推免/计算机基础综合/数据结构","h":"#树","p":127},{"i":130,"t":"图","u":"/docs/推免/计算机基础综合/数据结构","h":"#图","p":127},{"i":133,"t":"面试常考问题","u":"/docs/推免/数学/概率论","h":"#面试常考问题","p":132},{"i":136,"t":"一、U-2-Net","u":"/docs/推免/简历/简历面试准备","h":"#一u-2-net","p":135},{"i":137,"t":"(一)SOD任务","u":"/docs/推免/简历/简历面试准备","h":"#一sod任务","p":135},{"i":139,"t":"(二)网络结构","u":"/docs/推免/简历/简历面试准备","h":"#二网络结构","p":135},{"i":141,"t":"(三)损失函数","u":"/docs/推免/简历/简历面试准备","h":"#三损失函数","p":135},{"i":143,"t":"(四)深度可分离卷积","u":"/docs/推免/简历/简历面试准备","h":"#四深度可分离卷积","p":135},{"i":145,"t":"二、YOLO","u":"/docs/推免/简历/简历面试准备","h":"#二yolo","p":135},{"i":146,"t":"(一)mAP","u":"/docs/推免/简历/简历面试准备","h":"#一map","p":135},{"i":148,"t":"(二)YOLOv1","u":"/docs/推免/简历/简历面试准备","h":"#二yolov1","p":135},{"i":150,"t":"(二)YOLOv2","u":"/docs/推免/简历/简历面试准备","h":"#二yolov2","p":135},{"i":152,"t":"(三)YOLOv5","u":"/docs/推免/简历/简历面试准备","h":"#三yolov5","p":135},{"i":154,"t":"三、CBAM","u":"/docs/推免/简历/简历面试准备","h":"#三cbam","p":135},{"i":156,"t":"(一)总体结构","u":"/docs/推免/简历/简历面试准备","h":"#一总体结构","p":135},{"i":158,"t":"(二)通道注意力","u":"/docs/推免/简历/简历面试准备","h":"#二通道注意力","p":135},{"i":160,"t":"(三)空间注意力","u":"/docs/推免/简历/简历面试准备","h":"#三空间注意力","p":135},{"i":162,"t":"(四)其他注意事项","u":"/docs/推免/简历/简历面试准备","h":"#四其他注意事项","p":135},{"i":164,"t":"四、Focal Loss","u":"/docs/推免/简历/简历面试准备","h":"#四focal-loss","p":135},{"i":166,"t":"五、SENet","u":"/docs/推免/简历/简历面试准备","h":"#五senet","p":135},{"i":168,"t":"六、自注意力机制","u":"/docs/推免/简历/简历面试准备","h":"#六自注意力机制","p":135},{"i":170,"t":"七、自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#七自我介绍","p":135},{"i":171,"t":"(一)英文自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#一英文自我介绍","p":135},{"i":173,"t":"(二)西电广研院自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#二西电广研院自我介绍","p":135},{"i":175,"t":"(三)电子科技大学自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#三电子科技大学自我介绍","p":135},{"i":178,"t":"一、线性代数","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#一线性代数","p":177},{"i":180,"t":"二、概率论","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#二概率论","p":177},{"i":184,"t":"支持我!","u":"/docs/推免/intro","h":"#支持我","p":182},{"i":188,"t":"一、基础知识","u":"/docs/推免/数学/线性代数","h":"#一基础知识","p":186},{"i":190,"t":"二、面试常考问题","u":"/docs/推免/数学/线性代数","h":"#二面试常考问题","p":186},{"i":194,"t":"参考代码","u":"/docs/Algorithms/题解/反序输出","h":"#参考代码","p":192},{"i":196,"t":"题解","u":"/docs/Algorithms/题解/反序输出","h":"#题解","p":192},{"i":200,"t":"支持我!","u":"/docs/Algorithms/intro","h":"#支持我","p":198},{"i":204,"t":"参考代码","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#参考代码","p":202},{"i":206,"t":"题解","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#题解","p":202},{"i":210,"t":"参考代码","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#参考代码","p":208},{"i":212,"t":"题解","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#题解","p":208},{"i":216,"t":"vs2018 快捷键","u":"/docs/Algorithms/机试技巧与STL","h":"#vs2018-快捷键","p":214},{"i":218,"t":"头文件","u":"/docs/Algorithms/机试技巧与STL","h":"#头文件","p":214},{"i":219,"t":"标准c库","u":"/docs/Algorithms/机试技巧与STL","h":"#标准c库","p":214},{"i":221,"t":"c++ 
STL","u":"/docs/Algorithms/机试技巧与STL","h":"#c-stl","p":214},{"i":223,"t":"常用头","u":"/docs/Algorithms/机试技巧与STL","h":"#常用头","p":214},{"i":225,"t":"常用宏定义","u":"/docs/Algorithms/机试技巧与STL","h":"#常用宏定义","p":214},{"i":227,"t":"结构体","u":"/docs/Algorithms/机试技巧与STL","h":"#结构体","p":214},{"i":228,"t":"定义","u":"/docs/Algorithms/机试技巧与STL","h":"#定义","p":214},{"i":230,"t":"初始化","u":"/docs/Algorithms/机试技巧与STL","h":"#初始化","p":214},{"i":232,"t":"运算符重载","u":"/docs/Algorithms/机试技巧与STL","h":"#运算符重载","p":214},{"i":234,"t":"c++new的使用","u":"/docs/Algorithms/机试技巧与STL","h":"#cnew的使用","p":214},{"i":235,"t":"常规","u":"/docs/Algorithms/机试技巧与STL","h":"#常规","p":214},{"i":237,"t":"动态申请列大小固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请列大小固定的二维数组","p":214},{"i":239,"t":"动态申请大小不固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请大小不固定的二维数组","p":214},{"i":241,"t":"常用STL","u":"/docs/Algorithms/机试技巧与STL","h":"#常用stl","p":214},{"i":243,"t":"简述","u":"/docs/Algorithms/机试技巧与STL","h":"#简述","p":214},{"i":245,"t":"algorithm","u":"/docs/Algorithms/机试技巧与STL","h":"#algorithm","p":214},{"i":247,"t":"vector","u":"/docs/Algorithms/机试技巧与STL","h":"#vector","p":214},{"i":249,"t":"list","u":"/docs/Algorithms/机试技巧与STL","h":"#list","p":214},{"i":251,"t":"string","u":"/docs/Algorithms/机试技巧与STL","h":"#string","p":214},{"i":253,"t":"pair","u":"/docs/Algorithms/机试技巧与STL","h":"#pair","p":214},{"i":255,"t":"map","u":"/docs/Algorithms/机试技巧与STL","h":"#map","p":214},{"i":257,"t":"stack","u":"/docs/Algorithms/机试技巧与STL","h":"#stack","p":214},{"i":259,"t":"queue","u":"/docs/Algorithms/机试技巧与STL","h":"#queue","p":214},{"i":261,"t":"set","u":"/docs/Algorithms/机试技巧与STL","h":"#set","p":214},{"i":263,"t":"multiset","u":"/docs/Algorithms/机试技巧与STL","h":"#multiset","p":214},{"i":265,"t":"bitset","u":"/docs/Algorithms/机试技巧与STL","h":"#bitset","p":214},{"i":267,"t":"图模板","u":"/docs/Algorithms/机试技巧与STL","h":"#图模板","p":214},{"i":268,"t":"不带出入度的最简模板","u":"/docs/Algorithms/机试技巧与STL","h":"#不带出入度的最简模板","p":214},{"i":270,"t":"带出入度的 (2019推免试题)","u":"/docs/Algorithms/机试技巧与STL","h":"#带出入度的-2019推免试题","p":214},{"i":272,"t":"图算法:找出u到v的所有路径-邻接表","u":"/docs/Algorithms/机试技巧与STL","h":"#图算法找出u到v的所有路径-邻接表","p":214},{"i":274,"t":"树模板","u":"/docs/Algorithms/机试技巧与STL","h":"#树模板","p":214},{"i":275,"t":"注释版","u":"/docs/Algorithms/机试技巧与STL","h":"#注释版","p":214},{"i":277,"t":"简化版(Val As Index,若数据不在1~N内,则可能越界)","u":"/docs/Algorithms/机试技巧与STL","h":"#简化版val-as-index若数据不在1n内则可能越界","p":214},{"i":279,"t":"简化版(Val Not As Index,可以存任意的 Val)","u":"/docs/Algorithms/机试技巧与STL","h":"#简化版val-not-as-index可以存任意的-val","p":214},{"i":282,"t":"一、卷积的诞生&核心特征","u":"/docs/Deep Learning/基础知识/从全连接到卷积","h":"#一卷积的诞生核心特征","p":281},{"i":284,"t":"二、重新考察全连接层","u":"/docs/Deep Learning/基础知识/从全连接到卷积","h":"#二重新考察全连接层","p":281},{"i":286,"t":"三、总结","u":"/docs/Deep 
Learning/基础知识/从全连接到卷积","h":"#三总结","p":281},{"i":290,"t":"vector","u":"/docs/Algorithms/STL模板","h":"#vector","p":288},{"i":292,"t":"pair","u":"/docs/Algorithms/STL模板","h":"#pair","p":288},{"i":294,"t":"string","u":"/docs/Algorithms/STL模板","h":"#string","p":288},{"i":296,"t":"query","u":"/docs/Algorithms/STL模板","h":"#query","p":288},{"i":298,"t":"priority_queue","u":"/docs/Algorithms/STL模板","h":"#priority_queue","p":288},{"i":300,"t":"stack","u":"/docs/Algorithms/STL模板","h":"#stack","p":288},{"i":302,"t":"deque","u":"/docs/Algorithms/STL模板","h":"#deque","p":288},{"i":304,"t":"set/multiset","u":"/docs/Algorithms/STL模板","h":"#setmultiset","p":288},{"i":306,"t":"map/multimap","u":"/docs/Algorithms/STL模板","h":"#mapmultimap","p":288},{"i":308,"t":"biset","u":"/docs/Algorithms/STL模板","h":"#biset","p":288},{"i":311,"t":"一、卷积对像素位置信息是敏感的","u":"/docs/Deep Learning/基础知识/池化层","h":"#一卷积对像素位置信息是敏感的","p":310},{"i":313,"t":"二、池化层的作用","u":"/docs/Deep Learning/基础知识/池化层","h":"#二池化层的作用","p":310},{"i":315,"t":"三、池化的实现","u":"/docs/Deep Learning/基础知识/池化层","h":"#三池化的实现","p":310},{"i":318,"t":"一、1x1卷积","u":"/docs/Deep Learning/基础知识/卷积层","h":"#一1x1卷积","p":317},{"i":320,"t":"二、二维卷积层","u":"/docs/Deep Learning/基础知识/卷积层","h":"#二二维卷积层","p":317},{"i":323,"t":"一、激活函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#一激活函数","p":322},{"i":324,"t":"1. Sigmoid函数 / Logistic函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-sigmoid函数--logistic函数","p":322},{"i":326,"t":"2. 线性整流单元(Rectified Linear Unit, ReLU)","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-线性整流单元rectified-linear-unit-relu","p":322},{"i":328,"t":"二、损失函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#二损失函数","p":322},{"i":329,"t":"1. Mean Squared Error 均方误差","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-mean-squared-error-均方误差","p":322},{"i":331,"t":"2. 
Cross Entropy Loss 交叉熵损失","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-cross-entropy-loss-交叉熵损失","p":322},{"i":335,"t":"L1和L2是什么?","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1和l2是什么","p":333},{"i":337,"t":"Model","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#model","p":333},{"i":339,"t":"损失函数","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#损失函数","p":333},{"i":341,"t":"如何避免过拟合","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#如何避免过拟合","p":333},{"i":343,"t":"有正则化与没有正则化","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#有正则化与没有正则化","p":333},{"i":345,"t":"L1 vs L2","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1-vs-l2","p":333},{"i":347,"t":"L1的稀疏性","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1的稀疏性","p":333},{"i":351,"t":"常规卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#常规卷积","p":349},{"i":353,"t":"(1)逐通道卷积-Depthwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#1逐通道卷积-depthwise-convolution","p":349},{"i":355,"t":"(2)逐点卷积-Pointwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#2逐点卷积-pointwise-convolution","p":349},{"i":359,"t":"一、什么是正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#一什么是正则化","p":357},{"i":361,"t":"二、L1正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#二l1正则化","p":357},{"i":363,"t":"三、L2正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#三l2正则化与权重衰退","p":357},{"i":366,"t":"What is k-fold cross-validation?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#what-is-k-fold-cross-validation","p":365},{"i":368,"t":"How does k-fold cross-validation work?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#how-does-k-fold-cross-validation-work","p":365},{"i":370,"t":"Summary","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#summary","p":365},{"i":373,"t":"背景","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#背景","p":372},{"i":375,"t":"新的概念和技术","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#新的概念和技术","p":372},{"i":377,"t":"与LeNet比较","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#与lenet比较","p":372},{"i":380,"t":"背景","u":"/docs/Deep Learning/经典模型/LeNet","h":"#背景","p":379},{"i":382,"t":"代码实现","u":"/docs/Deep Learning/经典模型/LeNet","h":"#代码实现","p":379},{"i":384,"t":"问题","u":"/docs/Deep Learning/经典模型/LeNet","h":"#问题","p":379},{"i":387,"t":"一、什么是Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#一什么是logistic-regression","p":386},{"i":389,"t":"二、逻辑回归(Logistic Regression)和线性回归(Linear Regression)","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#二逻辑回归logistic-regression和线性回归linear-regression","p":386},{"i":391,"t":"三、逻辑回归到底是回归任务(Regression)还是分类任务(Classification)?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#三逻辑回归到底是回归任务regression还是分类任务classification","p":386},{"i":393,"t":"四、为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#四为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数","p":386},{"i":396,"t":"一、常用函数部分","u":"/docs/Deep Learning/基础知识/PyTroch基础","h":"#一常用函数部分","p":395},{"i":399,"t":"一、什么是感知机","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#一什么是感知机","p":398},{"i":401,"t":"二、详细原理","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#二详细原理","p":398},{"i":403,"t":"三、总结","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#三总结","p":398},{"i":407,"t":"CNN的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#cnn的局限性","p":406},{"i":408,"t":"输入与输出的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#输入与输出的局限性","p":406},{"i":410,"t":"关联上下文的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#关联上下文的局限性","p":406},{"i":414,"t":"支持我!","u":"/docs/Deep 
Learning/intro","h":"#支持我","p":412},{"i":418,"t":"一、安装Visdom","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#一安装visdom","p":416},{"i":420,"t":"二、Visdom的使用","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#二visdom的使用","p":416},{"i":421,"t":"0. Visdom的启动","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#0-visdom的启动","p":416},{"i":423,"t":"1. 单窗口单曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#1-单窗口单曲线的可视化","p":416},{"i":425,"t":"2. 单窗口多曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#2-单窗口多曲线的可视化","p":416},{"i":429,"t":"一、编写脚本","u":"/docs/Linux/实用工具/终端代理","h":"#一编写脚本","p":427},{"i":431,"t":"二、关联终端配置文件","u":"/docs/Linux/实用工具/终端代理","h":"#二关联终端配置文件","p":427},{"i":433,"t":"三、使用","u":"/docs/Linux/实用工具/终端代理","h":"#三使用","p":427},{"i":436,"t":"一、latte-dock","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#一latte-dock","p":435},{"i":438,"t":"二、Kde Plasmoids","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#二kde-plasmoids","p":435},{"i":441,"t":"一、发生原因","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#一发生原因","p":440},{"i":443,"t":"二、解决方案","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#二解决方案","p":440},{"i":447,"t":"支持我!","u":"/docs/Others/intro","h":"#支持我","p":445},{"i":451,"t":"支持我!","u":"/docs/Linux/intro","h":"#支持我","p":449},{"i":457,"t":"一、自我介绍部分","u":"/docs/Others/面试/要准备的问题","h":"#一自我介绍部分","p":455},{"i":459,"t":"二、专业课面试题","u":"/docs/Others/面试/要准备的问题","h":"#二专业课面试题","p":455},{"i":461,"t":"三、自由面试题","u":"/docs/Others/面试/要准备的问题","h":"#三自由面试题","p":455}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/4",[0,0.507]],["t/6",[0,0.507]],["t/12",[0,0.507]],["t/16",[0,0.565]],["t/20",[0,0.507]],["t/22",[0,0.507]],["t/25",[0,0.507]],["t/26",[0,0.381,1,4.211]],["t/28",[0,0.381,2,4.211]],["t/30",[0,0.381,3,4.211]],["t/32",[0,0.507]],["t/33",[0,0.381,4,4.211]],["t/35",[0,0.381,5,4.211]],["t/37",[0,0.381,6,4.211]],["t/39",[0,0.381,7,4.211]],["t/41",[0,0.507]],["t/42",[0,0.381,8,4.211]],["t/43",[9,4.211,10,4.211]],["t/45",[11,4.211,12,4.211]],["t/46",[13,4.211,14,4.211]],["t/47",[0,0.507]],["t/49",[15,2.633,16,4.211]],["t/50",[17,2.548,18,4.211]],["t/52",[19,3.487,20,4.211]],["t/54",[21,3.487,22,3.774]],["t/55",[22,3.774,23,3.774]],["t/57",[0,0.381,24,4.211]],["t/59",[25,4.211,26,4.211]],["t/61",[0,0.507]],["t/62",[0,0.381,27,4.211]],["t/64",[0,0.381,28,4.211]],["t/66",[0,0.381,29,4.211]],["t/67",[0,0.306,21,2.795,30,3.376]],["t/69",[0,0.381,31,4.211]],["t/71",[0,0.507]],["t/72",[0,0.381,15,2.633]],["t/74",[0,0.381,17,2.548]],["t/76",[0,0.381,19,3.487]],["t/78",[0,0.381,21,3.487]],["t/80",[0,0.381,23,3.774]],["t/83",[0,0.507]],["t/84",[15,2.633,32,4.211]],["t/86",[17,2.548,33,4.211]],["t/88",[0,0.381,19,3.487]],["t/90",[0,0.507]],["t/92",[0,0.381,15,2.633]],["t/94",[0,0.381,17,2.548]],["t/97",[0,0.507]],["t/99",[0,0.507]],["t/100",[0,0.507]],["t/102",[0,0.507]],["t/105",[0,0.507]],["t/106",[34,4.211,35,4.211]],["t/109",[15,2.633,36,3.487]],["t/111",[17,2.548,37,4.211]],["t/115",[0,0.507]],["t/121",[0,0.507]],["t/123",[0,0.507]],["t/125",[0,0.507]],["t/128",[0,0.507]],["t/130",[0,0.507]],["t/133",[0,0.507]],["t/136",[17,2.042,38,3.376,39,3.376]],["t/137",[40,5.596]],["t/139",[0,0.507]],["t/141",[0,0.507]],["t/143",[0,0.507]],["t/145",[41,5.596]],["t/146",[42,5.016]],["t/148",[43,5.596]],["t/150",[44,5.596]],["t/152",[45,5.596]],["t/154",[46,5.596]],["t/156",[0,0.507]],["t/158",[0,0.507]],["t/160",[0,0.507]],["t/162",[0,0.507]],["t/164",[47,4.211,48,3.774]],["t/166",[49,5.596]],["t/168",[0,0.507]],["t/170",[0,0.507]],["t/171",[0,0.507]],["t/173",[0,0.507]],["t/175",[0,0.507]],["t/178",[0,0.507]],[
"t/180",[0,0.507]],["t/184",[0,0.507]],["t/188",[0,0.507]],["t/190",[0,0.507]],["t/194",[0,0.507]],["t/196",[0,0.507]],["t/200",[0,0.507]],["t/204",[0,0.507]],["t/206",[0,0.507]],["t/210",[0,0.507]],["t/212",[0,0.507]],["t/216",[0,0.381,50,4.211]],["t/218",[0,0.507]],["t/219",[36,4.634]],["t/221",[36,3.487,51,3.774]],["t/223",[0,0.507]],["t/225",[0,0.507]],["t/227",[0,0.507]],["t/228",[0,0.507]],["t/230",[0,0.507]],["t/232",[0,0.507]],["t/234",[52,5.596]],["t/235",[0,0.507]],["t/237",[0,0.507]],["t/239",[0,0.507]],["t/241",[51,5.016]],["t/243",[0,0.507]],["t/245",[53,5.596]],["t/247",[54,5.016]],["t/249",[55,5.596]],["t/251",[56,5.016]],["t/253",[57,5.016]],["t/255",[42,5.016]],["t/257",[58,5.016]],["t/259",[59,5.596]],["t/261",[60,5.596]],["t/263",[61,5.596]],["t/265",[62,5.596]],["t/267",[0,0.507]],["t/268",[0,0.507]],["t/270",[0,0.381,63,4.211]],["t/272",[0,0.381,64,4.211]],["t/274",[0,0.507]],["t/275",[0,0.507]],["t/277",[65,3.774,66,4.211]],["t/279",[65,4.614,67,3.376]],["t/282",[0,0.507]],["t/284",[0,0.507]],["t/286",[0,0.507]],["t/290",[54,5.016]],["t/292",[57,5.016]],["t/294",[56,5.016]],["t/296",[68,5.596]],["t/298",[69,5.596]],["t/300",[58,5.016]],["t/302",[70,5.596]],["t/304",[71,5.596]],["t/306",[72,5.596]],["t/308",[73,5.596]],["t/311",[0,0.507]],["t/313",[0,0.507]],["t/315",[0,0.507]],["t/318",[74,5.596]],["t/320",[0,0.507]],["t/323",[0,0.507]],["t/324",[0,0.255,15,1.762,75,2.817,76,2.332]],["t/326",[17,1.462,77,2.417,78,2.417,79,2.417,80,2.417]],["t/328",[0,0.507]],["t/329",[0,0.219,15,1.511,81,2.417,82,2.417,83,2.417]],["t/331",[0,0.219,17,1.462,48,2.166,84,2.001,85,2.417]],["t/335",[86,5.596]],["t/337",[87,5.596]],["t/339",[0,0.507]],["t/341",[0,0.507]],["t/343",[0,0.507]],["t/345",[88,2.795,89,3.376,90,3.026]],["t/347",[88,4.634]],["t/351",[0,0.507]],["t/353",[15,2.111,91,3.376,92,3.026]],["t/355",[17,2.042,92,3.026,93,3.376]],["t/359",[0,0.507]],["t/361",[88,4.634]],["t/363",[90,5.016]],["t/366",[84,2.332,94,2.525,95,2.525,96,2.525]],["t/368",[84,2.001,94,2.166,95,2.166,96,2.166,97,2.417]],["t/370",[98,5.596]],["t/373",[0,0.507]],["t/375",[0,0.507]],["t/377",[99,5.596]],["t/380",[0,0.507]],["t/382",[0,0.507]],["t/384",[0,0.507]],["t/387",[76,3.487,100,3.774]],["t/389",[76,2.795,100,3.026,101,3.376]],["t/391",[102,5.596]],["t/393",[0,0.507]],["t/396",[0,0.507]],["t/399",[0,0.507]],["t/401",[0,0.507]],["t/403",[0,0.507]],["t/407",[103,5.596]],["t/408",[0,0.507]],["t/410",[0,0.507]],["t/414",[0,0.507]],["t/418",[104,4.634]],["t/420",[104,4.634]],["t/421",[104,3.487,105,4.211]],["t/423",[0,0.381,15,2.633]],["t/425",[0,0.381,17,2.548]],["t/429",[0,0.507]],["t/431",[0,0.507]],["t/433",[0,0.507]],["t/436",[106,4.211,107,4.211]],["t/438",[108,4.211,109,4.211]],["t/441",[0,0.507]],["t/443",[0,0.507]],["t/447",[0,0.507]],["t/451",[0,0.507]],["t/457",[0,0.507]],["t/459",[0,0.507]],["t/461",[0,0.507]]],"invertedIndex":[["",{"_index":0,"t":{"4":{"position":[[0,6]]},"6":{"position":[[0,6]]},"12":{"position":[[0,8]]},"16":{"position":[[0,2],[3,1],[5,8]]},"20":{"position":[[0,6]]},"22":{"position":[[0,6]]},"25":{"position":[[0,6]]},"26":{"position":[[4,9]]},"28":{"position":[[4,5]]},"30":{"position":[[4,4]]},"32":{"position":[[0,9]]},"33":{"position":[[4,2]]},"35":{"position":[[4,3]]},"37":{"position":[[4,6]]},"39":{"position":[[4,7]]},"41":{"position":[[0,8]]},"42":{"position":[[4,10]]},"47":{"position":[[0,14]]},"57":{"position":[[3,5]]},"61":{"position":[[0,14]]},"62":{"position":[[4,2]]},"64":{"position":[[4,2]]},"66":{"position":[[4,4]]},"67":{"position":[[4,2]]},"69":{"position":[[
4,4]]},"71":{"position":[[0,4]]},"72":{"position":[[2,9]]},"74":{"position":[[2,9]]},"76":{"position":[[2,4]]},"78":{"position":[[2,4]]},"80":{"position":[[2,3]]},"83":{"position":[[0,10]]},"88":{"position":[[3,7]]},"90":{"position":[[0,7]]},"92":{"position":[[3,2]]},"94":{"position":[[3,2]]},"97":{"position":[[0,6]]},"99":{"position":[[0,6]]},"100":{"position":[[0,6]]},"102":{"position":[[0,5]]},"105":{"position":[[0,6]]},"115":{"position":[[0,4]]},"121":{"position":[[0,2]]},"123":{"position":[[0,4]]},"125":{"position":[[0,2]]},"128":{"position":[[0,1]]},"130":{"position":[[0,1]]},"133":{"position":[[0,6]]},"139":{"position":[[0,7]]},"141":{"position":[[0,7]]},"143":{"position":[[0,10]]},"156":{"position":[[0,7]]},"158":{"position":[[0,8]]},"160":{"position":[[0,8]]},"162":{"position":[[0,9]]},"168":{"position":[[0,8]]},"170":{"position":[[0,6]]},"171":{"position":[[0,9]]},"173":{"position":[[0,12]]},"175":{"position":[[0,13]]},"178":{"position":[[0,6]]},"180":{"position":[[0,5]]},"184":{"position":[[0,4]]},"188":{"position":[[0,6]]},"190":{"position":[[0,8]]},"194":{"position":[[0,4]]},"196":{"position":[[0,2]]},"200":{"position":[[0,4]]},"204":{"position":[[0,4]]},"206":{"position":[[0,2]]},"210":{"position":[[0,4]]},"212":{"position":[[0,2]]},"216":{"position":[[7,3]]},"218":{"position":[[0,3]]},"223":{"position":[[0,3]]},"225":{"position":[[0,5]]},"227":{"position":[[0,3]]},"228":{"position":[[0,2]]},"230":{"position":[[0,3]]},"232":{"position":[[0,5]]},"235":{"position":[[0,2]]},"237":{"position":[[0,14]]},"239":{"position":[[0,14]]},"243":{"position":[[0,2]]},"267":{"position":[[0,3]]},"268":{"position":[[0,10]]},"270":{"position":[[0,5]]},"272":{"position":[[15,3]]},"274":{"position":[[0,3]]},"275":{"position":[[0,3]]},"282":{"position":[[0,12]]},"284":{"position":[[0,10]]},"286":{"position":[[0,4]]},"311":{"position":[[0,15]]},"313":{"position":[[0,8]]},"315":{"position":[[0,7]]},"320":{"position":[[0,7]]},"323":{"position":[[0,6]]},"324":{"position":[[13,1]]},"328":{"position":[[0,6]]},"329":{"position":[[22,4]]},"331":{"position":[[22,5]]},"339":{"position":[[0,4]]},"341":{"position":[[0,7]]},"343":{"position":[[0,10]]},"351":{"position":[[0,4]]},"359":{"position":[[0,8]]},"373":{"position":[[0,2]]},"375":{"position":[[0,7]]},"380":{"position":[[0,2]]},"382":{"position":[[0,4]]},"384":{"position":[[0,2]]},"393":{"position":[[0,31]]},"396":{"position":[[0,8]]},"399":{"position":[[0,8]]},"401":{"position":[[0,6]]},"403":{"position":[[0,4]]},"408":{"position":[[0,9]]},"410":{"position":[[0,9]]},"414":{"position":[[0,4]]},"423":{"position":[[3,10]]},"425":{"position":[[3,10]]},"429":{"position":[[0,6]]},"431":{"position":[[0,10]]},"433":{"position":[[0,4]]},"441":{"position":[[0,6]]},"443":{"position":[[0,6]]},"447":{"position":[[0,4]]},"451":{"position":[[0,4]]},"457":{"position":[[0,8]]},"459":{"position":[[0,8]]},"461":{"position":[[0,7]]}}}],["0",{"_index":105,"t":{"421":{"position":[[0,2]]}}}],["1",{"_index":15,"t":{"49":{"position":[[0,2]]},"72":{"position":[[0,1]]},"84":{"position":[[0,2]]},"92":{"position":[[0,2]]},"109":{"position":[[0,2]]},"324":{"position":[[0,2]]},"329":{"position":[[0,2]]},"353":{"position":[[0,8]]},"423":{"position":[[0,2]]}}}],["1.1",{"_index":1,"t":{"26":{"position":[[0,3]]}}}],["1.2",{"_index":2,"t":{"28":{"position":[[0,3]]}}}],["1.3",{"_index":3,"t":{"30":{"position":[[0,3]]}}}],["1x1",{"_index":74,"t":{"318":{"position":[[0,7]]}}}],["2",{"_index":17,"t":{"50":{"position":[[0,2]]},"74":{"position":[[0,1]]},"86":{"position":[[0,2]]},"94":{"position"
:[[0,2]]},"111":{"position":[[0,2]]},"136":{"position":[[4,1]]},"326":{"position":[[0,2]]},"331":{"position":[[0,2]]},"355":{"position":[[0,7]]},"425":{"position":[[0,2]]}}}],["2.1",{"_index":4,"t":{"33":{"position":[[0,3]]}}}],["2.2",{"_index":5,"t":{"35":{"position":[[0,3]]}}}],["2.3",{"_index":6,"t":{"37":{"position":[[0,3]]}}}],["2.4",{"_index":7,"t":{"39":{"position":[[0,3]]}}}],["2019",{"_index":63,"t":{"270":{"position":[[6,10]]}}}],["3",{"_index":19,"t":{"52":{"position":[[0,2]]},"76":{"position":[[0,1]]},"88":{"position":[[0,2]]}}}],["3.1",{"_index":8,"t":{"42":{"position":[[0,3]]}}}],["3.2",{"_index":9,"t":{"43":{"position":[[0,3]]}}}],["3.3",{"_index":11,"t":{"45":{"position":[[0,3]]}}}],["3.4",{"_index":13,"t":{"46":{"position":[[0,3]]}}}],["4",{"_index":21,"t":{"54":{"position":[[0,2]]},"67":{"position":[[7,10]]},"78":{"position":[[0,1]]}}}],["5",{"_index":23,"t":{"55":{"position":[[0,2]]},"80":{"position":[[0,1]]}}}],["5.1",{"_index":27,"t":{"62":{"position":[[0,3]]}}}],["5.2",{"_index":28,"t":{"64":{"position":[[0,3]]}}}],["5.3",{"_index":29,"t":{"66":{"position":[[0,3]]}}}],["5.4",{"_index":30,"t":{"67":{"position":[[0,3]]}}}],["5.5",{"_index":31,"t":{"69":{"position":[[0,3]]}}}],["6",{"_index":24,"t":{"57":{"position":[[0,2]]}}}],["7",{"_index":25,"t":{"59":{"position":[[0,2]]}}}],["algorithm",{"_index":53,"t":{"245":{"position":[[0,9]]}}}],["attent",{"_index":35,"t":{"106":{"position":[[11,9]]}}}],["biset",{"_index":73,"t":{"308":{"position":[[0,5]]}}}],["bitset",{"_index":62,"t":{"265":{"position":[[0,6]]}}}],["boch",{"_index":33,"t":{"86":{"position":[[3,6]]}}}],["c",{"_index":36,"t":{"109":{"position":[[3,7]]},"219":{"position":[[0,4]]},"221":{"position":[[0,3]]}}}],["c++new",{"_index":52,"t":{"234":{"position":[[0,9]]}}}],["cbam",{"_index":46,"t":{"154":{"position":[[0,6]]}}}],["cnn",{"_index":103,"t":{"407":{"position":[[0,7]]}}}],["cnn与self",{"_index":34,"t":{"106":{"position":[[0,10]]}}}],["convolut",{"_index":92,"t":{"353":{"position":[[19,11]]},"355":{"position":[[18,11]]}}}],["cross",{"_index":84,"t":{"331":{"position":[[3,5]]},"366":{"position":[[15,5]]},"368":{"position":[[16,5]]}}}],["depthwis",{"_index":91,"t":{"353":{"position":[[9,9]]}}}],["dequ",{"_index":70,"t":{"302":{"position":[[0,5]]}}}],["dock",{"_index":107,"t":{"436":{"position":[[8,4]]}}}],["entropi",{"_index":85,"t":{"331":{"position":[[9,7]]}}}],["error",{"_index":83,"t":{"329":{"position":[[16,5]]}}}],["fa",{"_index":10,"t":{"43":{"position":[[4,9]]}}}],["first",{"_index":16,"t":{"49":{"position":[[3,9]]}}}],["focal",{"_index":47,"t":{"164":{"position":[[0,7]]}}}],["fold",{"_index":95,"t":{"366":{"position":[[10,4]]},"368":{"position":[[11,4]]}}}],["follow",{"_index":18,"t":{"50":{"position":[[3,10]]}}}],["geeko",{"_index":32,"t":{"84":{"position":[[3,7]]}}}],["index",{"_index":67,"t":{"279":{"position":[[15,12]]}}}],["index,若数据不在1~n",{"_index":66,"t":{"277":{"position":[[11,22]]}}}],["k",{"_index":94,"t":{"366":{"position":[[8,1]]},"368":{"position":[[9,1]]}}}],["kde",{"_index":108,"t":{"438":{"position":[[0,5]]}}}],["l1",{"_index":88,"t":{"345":{"position":[[0,2]]},"347":{"position":[[0,6]]},"361":{"position":[[0,7]]}}}],["l1和l2",{"_index":86,"t":{"335":{"position":[[0,9]]}}}],["l2",{"_index":90,"t":{"345":{"position":[[6,2]]},"363":{"position":[[0,12]]}}}],["latt",{"_index":106,"t":{"436":{"position":[[0,7]]}}}],["lenet",{"_index":99,"t":{"377":{"position":[[0,8]]}}}],["linear",{"_index":78,"t":{"326":{"position":[[20,6]]}}}],["linux的编译系统对c",{"_index":37,"t":{"111":{"position":[[3,22]]}}}],[
"list",{"_index":55,"t":{"249":{"position":[[0,4]]}}}],["ll(1",{"_index":22,"t":{"54":{"position":[[3,10]]},"55":{"position":[[3,10]]}}}],["ll(1)文法到ll(1",{"_index":26,"t":{"59":{"position":[[3,21]]}}}],["logist",{"_index":76,"t":{"324":{"position":[[15,10]]},"387":{"position":[[0,13]]},"389":{"position":[[0,15]]}}}],["loss",{"_index":48,"t":{"164":{"position":[[8,4]]},"331":{"position":[[17,4]]}}}],["map",{"_index":42,"t":{"146":{"position":[[0,6]]},"255":{"position":[[0,3]]}}}],["map/multimap",{"_index":72,"t":{"306":{"position":[[0,12]]}}}],["mean",{"_index":81,"t":{"329":{"position":[[3,4]]}}}],["model",{"_index":87,"t":{"337":{"position":[[0,5]]}}}],["multiset",{"_index":61,"t":{"263":{"position":[[0,8]]}}}],["net",{"_index":39,"t":{"136":{"position":[[6,3]]}}}],["pair",{"_index":57,"t":{"253":{"position":[[0,4]]},"292":{"position":[[0,4]]}}}],["plasmoid",{"_index":109,"t":{"438":{"position":[[6,9]]}}}],["pointwis",{"_index":93,"t":{"355":{"position":[[8,9]]}}}],["priority_queu",{"_index":69,"t":{"298":{"position":[[0,14]]}}}],["queri",{"_index":68,"t":{"296":{"position":[[0,5]]}}}],["queue",{"_index":59,"t":{"259":{"position":[[0,5]]}}}],["rectifi",{"_index":77,"t":{"326":{"position":[[3,16]]}}}],["regress",{"_index":100,"t":{"387":{"position":[[14,10]]},"389":{"position":[[40,11]]}}}],["regression)和线性回归(linear",{"_index":101,"t":{"389":{"position":[[16,23]]}}}],["regression)还是分类任务(classif",{"_index":102,"t":{"391":{"position":[[0,48]]}}}],["relu",{"_index":80,"t":{"326":{"position":[[33,5]]}}}],["re与有穷自动机fa",{"_index":12,"t":{"45":{"position":[[4,18]]}}}],["rm与有穷自动机fa",{"_index":14,"t":{"46":{"position":[[4,19]]}}}],["select",{"_index":20,"t":{"52":{"position":[[3,10]]}}}],["senet",{"_index":49,"t":{"166":{"position":[[0,7]]}}}],["set",{"_index":60,"t":{"261":{"position":[[0,3]]}}}],["set/multiset",{"_index":71,"t":{"304":{"position":[[0,12]]}}}],["sigmoid",{"_index":75,"t":{"324":{"position":[[3,9]]}}}],["sod",{"_index":40,"t":{"137":{"position":[[0,8]]}}}],["squar",{"_index":82,"t":{"329":{"position":[[8,7]]}}}],["stack",{"_index":58,"t":{"257":{"position":[[0,5]]},"300":{"position":[[0,5]]}}}],["stl",{"_index":51,"t":{"221":{"position":[[4,3]]},"241":{"position":[[0,5]]}}}],["string",{"_index":56,"t":{"251":{"position":[[0,6]]},"294":{"position":[[0,6]]}}}],["summari",{"_index":98,"t":{"370":{"position":[[0,7]]}}}],["u",{"_index":38,"t":{"136":{"position":[[0,3]]}}}],["unit",{"_index":79,"t":{"326":{"position":[[27,5]]}}}],["u到v",{"_index":64,"t":{"272":{"position":[[0,14]]}}}],["val",{"_index":65,"t":{"277":{"position":[[0,7]]},"279":{"position":[[0,7],[28,4]]}}}],["valid",{"_index":96,"t":{"366":{"position":[[21,11]]},"368":{"position":[[22,10]]}}}],["vector",{"_index":54,"t":{"247":{"position":[[0,6]]},"290":{"position":[[0,6]]}}}],["visdom",{"_index":104,"t":{"418":{"position":[[0,10]]},"420":{"position":[[0,11]]},"421":{"position":[[3,9]]}}}],["vs",{"_index":89,"t":{"345":{"position":[[3,2]]}}}],["vs2018",{"_index":50,"t":{"216":{"position":[[0,6]]}}}],["work",{"_index":97,"t":{"368":{"position":[[33,5]]}}}],["yolo",{"_index":41,"t":{"145":{"position":[[0,6]]}}}],["yolov1",{"_index":43,"t":{"148":{"position":[[0,9]]}}}],["yolov2",{"_index":44,"t":{"150":{"position":[[0,9]]}}}],["yolov5",{"_index":45,"t":{"152":{"position":[[0,9]]}}}]],"pipeline":["stemmer"]}},{"documents":[{"i":3,"t":"一、激活函数​ Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} 
\\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"激活函数与Loss的梯度","u":"/blog/激活函数与Loss的梯度","h":"","p":2},{"i":5,"t":"Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"一、激活函数","u":"/blog/激活函数与Loss的梯度","h":"#一激活函数","p":2},{"i":7,"t":"Mean Squared Error 均方误差 L2范数是对元素求平方和后再开根号,需要.pow(2)后才可作为损失函数 微小的误差可能对网络性能带来极大的影响 LossMSE=∑[y−f(x)]2(1)Loss_{MSE} = \\sum{[{y - f(x)]^2}} \\tag{1}LossMSE​=∑[y−f(x)]2(1) ∥y−f(x)∥2=∑[y−f(x)]22(2)\\Vert y - f(x) \\Vert_2 = \\sqrt[2]{\\sum{[y - f(x)]^2}} \\tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2) Cross Entropy Loss 交叉熵损失 binary 二分类问题 multi-class 多分类问题 经常与softmax激活函数搭配使用","s":"二、损失函数","u":"/blog/激活函数与Loss的梯度","h":"#二损失函数","p":2},{"i":9,"t":"梯度下降算法需要求整个数据集上的计算损失函数以及梯度,计算代价太大,因此常采用小批量随机梯度下降。在每个batch上计算损失函数以及梯度,近似损失。此时,batchsize越大,近似效果越好。 随机梯度下降的随机指的就是使用的数据是随机选择的mini batch数据,即Mini-Batch Gradient Descent。 然而,batchsize越小,收敛效果越好。随机梯度下降理论上带来了噪音,batchsize较小时带来的噪音较大,可以增加模型的鲁棒性。 前向传播(Forward Propagation):已知权重、偏置和输入,计算出损失函数 反向传播(Backward Propagation):求出损失函数对于每一个权重的偏导 交叉熵常来用于衡量两个概率之间的区别 交叉熵损失函数的梯度是真实概率和预测概率的区别 softmax激活函数常用于多分类问题。经过softmax函数后得到的输出为一组概率,概率非负且相加和为1 需要看的论文:ResNet,U-Net 训练优化方法: 初始化:恺明初始化方法 学习率: 动量:逃出局部最小值,可直观理解为惯性","s":"理论基础","u":"/blog/理论知识","h":"","p":8},{"i":11,"t":"一、常用函数部分​ concat与stack函数 stack函数对输入的两个张量在指定的维度进行堆叠,是==创建了新的维度== concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 
最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的==最外层==对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"PyTorch基础","u":"/blog/PyTroch基础","h":"","p":10},{"i":13,"t":"concat与stack函数 stack函数对输入的两个张量在指定的维度进行堆叠,是==创建了新的维度== concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的==最外层==对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"一、常用函数部分","u":"/blog/PyTroch基础","h":"#一常用函数部分","p":10},{"i":15,"t":"矩阵 / 向量的内积和外积​ 点乘:内积又称标量积,运算结果为标量,是将两个矩阵或向量的对应元素做乘法 叉乘:外积又称向量积,运算结果为向量,遵循行列式乘法规则","s":"基础数学知识","u":"/blog/数学基础","h":"","p":14},{"i":17,"t":"点乘:内积又称标量积,运算结果为标量,是将两个矩阵或向量的对应元素做乘法 叉乘:外积又称向量积,运算结果为向量,遵循行列式乘法规则","s":"矩阵 / 向量的内积和外积","u":"/blog/数学基础","h":"#矩阵--向量的内积和外积","p":14},{"i":19,"t":"一、激活函数​ Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} 
\\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"激活函数与Loss的梯度","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"","p":18},{"i":21,"t":"Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"一、激活函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#一激活函数","p":18},{"i":23,"t":"Mean Squared Error 均方误差 L2范数是对元素求平方和后再开根号,需要.pow(2)后才可作为损失函数 微小的误差可能对网络性能带来极大的影响 LossMSE=∑[y−f(x)]2(1)Loss_{MSE} = \\sum{[{y - f(x)]^2}} \\tag{1}LossMSE​=∑[y−f(x)]2(1) ∥y−f(x)∥2=∑[y−f(x)]22(2)\\Vert y - f(x) \\Vert_2 = \\sqrt[2]{\\sum{[y - f(x)]^2}} \\tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2) Cross Entropy Loss 交叉熵损失 binary 二分类问题 multi-class 多分类问题 经常与softmax激活函数搭配使用","s":"二、损失函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#二损失函数","p":18},{"i":27,"t":"词法分析:分析输入串如何构成句子,得到单词序列 语法分析:分析单词序列如何构成程序,构造语法分析树 语义分析:审查语义错误,为代码生成收集类型信息 中间代码生成 代码优化 目标代码生成 表管理、错误检查和处理贯穿整个过程","s":"1.1 编译程序的逻辑结构","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#11-编译程序的逻辑结构","p":24},{"i":29,"t":"前端是指与源语言有关、与目标机无关的部分 如词法分析、语法分析、语义分析、中间代码生成、代码优化中与机器无关的部分 后端是指与目标机有关的部分 如代码优化中与机器有关的部分、目标代码的生成","s":"1.2 前端和后端","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#12-前端和后端","p":24},{"i":31,"t":"遍是指从头到尾扫描一遍源程序","s":"1.3 遍的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#13-遍的概念","p":24},{"i":34,"t":"若从文法的开始符号开始存在以下推导,则称α\\alphaα为该文法的一个句型,句型中既可以包含终结符,也可以包含非终结符,也可以是空串 S⇒∗α, α∈(VT∪VN)∗(1)S \\Rightarrow^* \\alpha,\\space \\alpha \\in (V_T \\cup V_N)^* \\tag{1}S⇒∗α, α∈(VT​∪VN​)∗(1)","s":"2.1 句型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#21-句型","p":24},{"i":36,"t":"S⇒∗β, β∈VT∗(2)S \\Rightarrow^* \\beta,\\space \\beta \\in V_T^* \\tag{2}S⇒∗β, β∈VT∗​(2) 则称β\\betaβ是该文法的句子","s":"2.2 句子:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#22-句子","p":24},{"i":38,"t":"0型文法,又称无限制文法、短语文法 1型文法,又称文有关文法 2型文法,又称上下文无关文法(Context-Free Grammar,CFG) 可用来构建语法树,语法树是上下文无关文法推导和规约的图形化表示 A→β, A∈VN, β∈(VT∪VN)∗(3)\\Alpha \\rightarrow \\beta,\\space \\Alpha \\in V_N, \\space \\beta \\in (V_T \\cup V_N)^* \\tag{3}A→β, A∈VN​, β∈(VT​∪VN​)∗(3) 3型文法,又称正规文法(Regular Grammar,RG) 左线性文法 右线性文法","s":"2.3 文法的分类:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#23-文法的分类","p":24},{"i":40,"t":"如果在推导的任何一步都是对产生式左部中的最左/右非终结符进行替换,则称为最左/右推导,其中最右推导也被成为规范推导","s":"2.4 最左/右推导:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#24-最左右推导","p":24},{"i":44,"t":"确定的有穷自动机(DFA) DFA的定义及组成 确定的含义:在状态转换的每一步,FA根据当前的状态及扫描的输入字符,便能唯一地知道FA的下一状态。 提示 在状态转换图中的直观体现就是,在确定行表示的当前状态以及列确定的路径后,得到的目的状态不会是元素个数大于1的集合。 DFA的可接受以及接受集的定义:从开始状态开始,经过该符号串表示的路径,若能到达终态则称该符号串可被改DFA接受。 不确定的有穷自动机(NFA) NFA的确定化,即将NFA转换为DFA(子集法) 步骤: 画出DFA转换表 提示 转换表中在状态一列中,状态包含原NFA终态的集合要标*,代表其为等价DFA的终态 计算move(T,a)move(T, a)move(T,a) 计算ϵ−closure(T)\\epsilon -closure(T)ϵ−closure(T) 为转换表中的状态重命名 确定初态和终态 DFA的最小化(分割法) 步骤如下: 提示 考试时注意过程怎么写,下面使用需要三轮分割的列子演示步骤 在分割完成后,对可以化简的集合选出一个状态作为代表,删除其他多余状态,重新画图","s":"3.2 
有穷自动机(FA)","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#32-有穷自动机fa","p":24},{"i":48,"t":"描述程序语法结构的规则可以使用2型文法(上下文无关语法,CFG) 语法分析方法包含确定的和不确定的分析方法,确定的语法分析方法根据输入符号,唯一选择产生式 确定的自顶向下分析方法:根据当前的输入符号唯一地确定选用哪个产生式替换相应的非终结符以往下推导","s":"第四章:自顶向下语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第四章自顶向下语法分析方法","p":24},{"i":51,"t":"提示 FOLLOW集的求法可以按照下图技巧进行 若要求的非终结符是开始符号,则直接将#插入FOLLOW集中 在所有产生式的右部中找到要求的非终结符 看非终结符的右侧是什么元素 若无元素,则直接将该产生式左部的FOLLOW集加入到该非终结符的FOLLOW集中 若为终结符,直接将该终结符加入到FOLLOW集中 若为非终结符,将FIRST(该非终结符)减去ϵ\\epsilonϵ的所有终结符元素都加入至FOLLOW集中","s":"2. Follow集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-follow集的定义","p":24},{"i":53,"t":"提示 需要注意的是FIRST集、FOLLOW集是针对于符号串而言的,而SELECT集是针对于产生式而言的","s":"3. SELECT集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-select集的定义","p":24},{"i":56,"t":"提示 考试时注意书写过程,需要画出以下两张表","s":"5. LL(1)文法的判别","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-ll1文法的判别","p":24},{"i":58,"t":"预测分析表通过计算SELECT集得到,形如下表 行标为各非终结符,列标为输入符号,若从某一非终结符开始的产生式的SELECT集包含某一输入符号,则对应产生式就是行列确定的元素值。","s":"6. 预测分析表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#6-预测分析表","p":24},{"i":60,"t":"消除左公因子(回溯) 警告 同一非终结符的多个产生式存在共同前缀,会导致回溯现象,需要消除 消除左递归 警告 左递归文法会使递归下降分析器陷入无限循环 消除直接左递归 消除间接左递归 通过代入法变成直接左递归再消除","s":"7. 非LL(1)文法到LL(1)文法的等价变换","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#7-非ll1文法到ll1文法的等价变换","p":24},{"i":63,"t":"从的底部向顶部的方向构造语法分析树,采用最左归约的方式,即最右推导的逆过程 提示 注意辨别:自顶向下的语法分析采用最左推导的方式 最右推导是规范推导,最左归约是最右推导的逆过程,又称规范归约","s":"5.1 概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#51-概念","p":24},{"i":65,"t":"算符优先分析法 按照算符的优先关系和结合性质进行语法分析 LR分析法(重点) 规范规约:句柄作为可归约串","s":"5.2 方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#52-方法","p":24},{"i":68,"t":"移入:将下一个输入符号移到栈顶 归约:被归约的符号串的右端处于栈顶,语法分析器在栈中确定这个串的左端非终结符来替换该串 接受:宣布语法分析过程成功完成 报错:发现一个语法错误,并调用错误恢复子程序","s":"5.4 移入-归约分析器的4种动作","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#54-移入-归约分析器的4种动作","p":24},{"i":70,"t":"前导知识:4种项目状态 归约项目:·在最后 接受项目:拓广文法的开始符号的产生式,且·在最后 移进项目:·后面是终结符VTV_TVT​ 待约项目:·后面是非终结符VNV_NVN​ 移入-归约分析 LR(0)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV1pL4y1E7RE/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在写预测分析表的reduce项时,action的每一列都要写 SLR(1)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV12u411S7Us/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在写预测分析表的reduce项时,只写产生式左部的FOLLOW集对应的action列 LR(1)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV1Vm4y1Q7XB/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在构造项目集时,要加入前向搜索符;并且,在写预测分析表的reduce项时只写前向搜索符对应的action列 LALR(1)分析表 / 构造其识别活前缀DFA 在构造项目集时,要加入前向搜索符,但是要合并同心集,把相同表达式但是不同前向搜索符的前向搜索符合并,并且在写预测分析表的reduce项时只写前向搜索符集对应的action列 https://www.bilibili.com/video/BV13r4y1m7sQ/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533","s":"5.5 重要题型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#55-重要题型","p":24},{"i":73,"t":"词法分析:从左到右扫描源程序,识别出各个单词,确定单词类型并形成单词序列,进行词法错误检查,对标识符进行登记,即符号表管理 语法分析:从词法分析输出的单词序列识别出各类短语,构造语法分析树,并进行语法错误检查 语义分析:审查程序是否具有语义错误,为代码生成阶段收集类型信息,不符合规范时报错(符号表是语义正确性检查的依据) 中间代码生成:生成中间代码,如三地址指令、四元式、波兰式、逆波兰式、树形结构等 代码优化:对代码进行等价变换以求提高执行效率,提高速度或节省空间 目标代码生成:将中间代码转化成目标机上的机器指令代码或汇编代码(符号表是对符号分配地址的依据)","s":"1 编译程序各阶段功能","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-编译程序各阶段功能","p":24},{"i":75,"t":"就产生语法树的方向而言,可大致分为自顶向下的语法分析和自底向上的语法分析两大类。 自顶向下的语法分析方法:主流方法为递归下降分析法。根据当前的输入符号唯一地确定选用哪个产生式替换相应的非终结符以往下推导。 自底向上的语法分析方法:将输入串w归约为文法开始符号S的过程。 提示 LR(0), SLR(1), LR(1) LR(0)文法可能存在移进-归约冲突、归约-归约冲突 SLR(1)文法在构造的过程中不存在归约-归约冲突,但有可能出现移进-归约冲突,可以由FOLLOW集解决的话则是SLR(1)文法","s":"2 语法分析方法的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-语法分析方法的概念","p":24},{"i":77,"t":"翻译模式是适合语法制导语义计算的另一种描述形式,可以体现一种合理调用语义动作的算法。 S-翻译模式: 仅涉及综合属性的翻译模式,通常将语义动作集合置于产生式右端末尾。 L-翻译模式: 既可以包含综合属性,也可以包含继承属性。","s":"3 
翻译模式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-翻译模式","p":24},{"i":79,"t":"在文法基础上,为文法符号关联有特定意义的属性,并为产生式关联相应的语义动作,称之为属性文法。 S-属性文法: 只包含综合属性的属性文法成为S-属性文法 L-属性文法: 可以包含综合属性,也可以包含继承属性,但要求产生式右部的文法符号的继承属性的计算只取决于该符号左边符号的属性","s":"4 属性文法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-属性文法","p":24},{"i":81,"t":"符号表是编译程序中用于收集标识符的属性信息的数据结构。 各阶段作用: 语义分析阶段:语义合法性检查的依据 目标代码生成阶段:对符号名进行地址分配的依据","s":"5 符号表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-符号表","p":24},{"i":85,"t":"GeekOS是一个基于x86体系结构的微操作系统内核. 由美国马理兰大学的教师开发, 主要用于操作系统课程设计的教育. 出于教学目的, 这个系统内核设计简单, 却又兼备实用性, 它可以运行在真正的X86 PC硬件平台. 在下载好GeekOS后, 在geekos-version/src/目录下会存在project0-project6这7个文件夹, 分别代表GeekOS设计的7个学习任务. 在环境搭建完成之后, 我们进行的每一个项目的代码编写几乎都在geekos-version/src/projecti/src/geekos/文件夹下, 每一个项目的编译都在geekos-version/src/projecti/build文件夹下进行, 即要在终端中通过cd进入该目录, 再执行make depend和make命令.","s":"1. GeekOS:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-geekos","p":82},{"i":87,"t":"bochs是一个x86硬件平台的模拟器. GeekOS运行依托于bochs. 在安装好Linux操作系统后需要安装bochs以及nasm, 以完成GeekOS环境的搭建.","s":"2. bochs:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-bochs","p":82},{"i":89,"t":"GeekOS的开发环境可分为两部分, 一部分是编译环境, 一部分是运行环境. 在编译过程中, 使用Linux自带的编译环境以及编译命令对特定的GeekOS project进行编译即可. 首先在终端中通过cd命令进入geekos-version/src/projecti/build目录, 再执行make depend和make命令. 编译后生成bochs的镜像文件fd.img, 这是bochs运行所必须的文件,也是GeekOS运行环境的前置配置.","s":"3. 二者之间的关系","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#3-二者之间的关系","p":82},{"i":91,"t":"安装其实非常简单, 这里主要花篇幅介绍安装后解决报错的配置.","s":"二、安装与配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#二安装与配置","p":82},{"i":93,"t":"需要下载GeekOS Files, 安装bochs, nasm等. GeekOS直接下载压缩包, 解压即可. arch系用户通过以下命令即可完成bochs和nasm的安装. yay -S bochs nasm 其他发行版的安装方法这里不再赘述, 可选择从群文件里下载源文件并编译安装, 师兄师姐也在群文件里给了一些教程指导.","s":"1. 安装","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-安装","p":82},{"i":95,"t":"完成安装后, 我们就可以开始对project0中的代码进行完善了, 并在geekos-version/src/project0/build目录下执行make depend以及make命令, 目的是编译project0的代码, 生成bochs的镜像文件fd.img以构建GeekOS的运行环境. 但很多报错就是在make这一步产生的, 因此在安装完成后还需要进行配置. 配置分为两部分, 一个是对GeekOS中makefile的修改, 另一部分是对bochs的配置文件的修改. GeekOS中makefile的配置​ 综合网上很多师兄师姐的博客,这三个错误应该是每个人都会遇到的,所以当你不确定自己能不能运行时,请全部完成这三个步骤. 问题: warnings being treated as errors 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件(由于每个project下都存在一个对应的makefile文件, 所以在每个项目编译前都要修改一次) // 修改第149行: CC_GENERAL_OPTS := $(GENERAL_OPTS) -Werror // 修改后: CC_GENERAL_OPTS := $(GENERAL_OPTS) 问题: X86_64与i386输出不兼容 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件 # Target C compiler. gcc 2.95.2 or later should work. 100行 TARGET_CC := $(TARGET_CC_PREFIX)gcc -m32 # Host C compiler. This is used to compile programs to execute on # the host platform, not the target (x86) platform. On x86/ELF # systems, such as Linux and FreeBSD, it can generally be the same # as the target C compiler. 106行 HOST_CC := gcc -m32 # Target linker. GNU ld is probably to only one that will work.109行 TARGET_LD := $(TARGET_CC_PREFIX)ld -m elf_i386 问题: undefined reference to '__stack_chk_fail' 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件 # Flags used for all C source files // 修改前:148行 GENERAL_OPTS := -O -Wall $(EXTRA_C_OPTS) // 修改后: GENERAL_OPTS := -O -Wall -fno-stack-protector $(EXTRA_C_OPTS) bochs配置文件的修改​ 在geekos-version/src/projecti/build目录下创建.bochsrc文件 # An example .bochsrc file. # You will need to edit these lines to reflect your system. 
vgaromimage: file=/usr/local/share/bochs/VGABIOS-lgpl-latest # 请根据自己的实际安装路径更改 romimage: file=/usr/local/share/bochs/BIOS-bochs-latest # 请根据自己的实际安装路径更改 megs: 8 boot: a floppya: 1_44=fd.img, status=inserted #floppya: 1_44=fd_aug.img, status=inserted log: ./bochs.out # keyboard_serial_delay: 200 # vga_update_interval: 300000 mouse: enabled=0 private_colormap: enabled=0 # i440fxsupport: enabled=0 # Uncomment this to write all bochs debugging messages to # bochs.out. This produces a lot of output, but can be very # useful for debugging the kernel. #debug: action=report 到此为止, 所有的配置工作已经完成, 可以正常的进行下一步的代码完善. 如果需要验证自己是否配置成功, 可以参照下一篇博客GeekOS project 0的实现, 在本篇博客中会有完整的C语言代码编写以及编译、使用bochs执行的过程.","s":"2. 配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-配置","p":82},{"i":98,"t":"MIPS=一个周期可执行的指令条数/(周期*10^6) CPI代表一条指令需要执行几个周期,则一个周期可执行的指令条数等于CPI的倒数 故MIPS=频率/(CPI*10^6)","s":"一、基础知识","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#一基础知识","p":96},{"i":101,"t":"一般情况下,数据在Cache以及主存中是以字为单位进行编码的 Cache与主存是以字块为单位进行数据交换的 Cache透明性分析:从主存中读的时候一定调入Cache,写的时候不一定: 按写分配:向主存中写入的时候同时调入Cache 不按写分配:向主存中写入的时候不调入Cache 在解答Cache与主存采用组相联、LRU替换算法进行访问主存序列流的类型题时,注意组相联,要将Cache分为组号和块号,分开进行更新: 先对主存地址求余,余数即为其在Cache中的组号 在组内是全相联映像,使用LRU替换算法进行替换操作 Cache预取算法命中率的计算 H′=H+n−1n(1)H'=\\frac{H+n-1}{n} \\tag{1}H′=nH+n−1​(1) 其中,nnn为Cache块大小与数据块重复使用次数的乘积,HHH为原来的命中率","s":"三、存储系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#三存储系统","p":96},{"i":103,"t":"当采用预留算法进行最优调度时,设最小平均间隔周期为xxx,则在第一个任务完成后,每隔xxx时钟周期流出一个任务 最小间隔周期的确定: 预约表中最多√数一行中的√数 通过次数最多的功能段的通过次数 此时,该功能段也就是瓶颈段","s":"四、流水线","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#四流水线","p":96},{"i":108,"t":"在上一篇博客中我们完成了GeekOS环境的配置,下面我们来验证环境配置的成功与否以及project 0的实现。","s":"GeekOS project 0的实现","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"","p":107},{"i":110,"t":"编写geekos-version/src/projecti/src/geekos/main.c文件 编写函数project0实现检测键盘输入Ctrl+d结束线程。 void project0(){ Print(\"To Exit hit Ctrl + d.\\n\"); Keycode keycode; while(1) { if(Read_Key(&keycode)) { if(!((keycode & KEY_SPECIAL_FLAG) || (keycode & KEY_RELEASE_FLAG)))// 不是特殊键或者弹起 { int asciiCode = keycode & 0xff;//d if((keycode & KEY_CTRL_FLAG)==KEY_CTRL_FLAG && asciiCode=='d')//ctrl+d { Print(\"\\n---------Adios!---------\\n\"); # 这里需要注意素质 Exit(1); }else { Print(\"%c\",(asciiCode=='\\r') ? '\\n' : asciiCode); } } } } } 在main函数中添加以下代码,实现自定义函数的调用,创建线程。 struct Kernel_Thread *thread; thread = Start_Kernel_Thread(&project0,0,PRIORITY_NORMAL,false); 总体代码 /* * GeekOS C code entry point * Copyright (c) 2001,2003,2004 David H. Hovemeyer * Copyright (c) 2003, Jeffrey K. Hollingsworth * Copyright (c) 2004, Iulian Neamtiu * $Revision: 1.51 $ * * This is free software. You are permitted to use, * redistribute, and modify it as specified in the file \"COPYING\". */ #include #include #include #include #include #include #include #include #include #include #include void project0(){ Print(\"To Exit hit Ctrl + d.\\n\"); Keycode keycode; while(1) { if(Read_Key(&keycode)) { if(!((keycode & KEY_SPECIAL_FLAG) || (keycode & KEY_RELEASE_FLAG)))// 不是特殊键或者弹起 { int asciiCode = keycode & 0xff;//d if((keycode & KEY_CTRL_FLAG)==KEY_CTRL_FLAG && asciiCode=='d')//ctrl+d { Print(\"\\n---------Adios! Motherfucker!---------\\n\"); Exit(1); }else { Print(\"%c\",(asciiCode=='\\r') ? '\\n' : asciiCode); } } } } } /* * Kernel C code entry point. * Initializes kernel subsystems, mounts filesystems, * and spawns init process. 
*/ void Main(struct Boot_Info* bootInfo) { Init_BSS(); Init_Screen(); Init_Mem(bootInfo); Init_CRC32(); Init_TSS(); Init_Interrupts(); Init_Scheduler(); Init_Traps(); Init_Timer(); Init_Keyboard(); Set_Current_Attr(ATTRIB(BLACK, GREEN|BRIGHT)); Print(\"Welcome to GeekOS!\\n\"); Set_Current_Attr(ATTRIB(BLACK, GRAY)); // TODO(\"Start a kernel thread to echo pressed keys and print counts\"); struct Kernel_Thread *thread; thread = Start_Kernel_Thread(&project0,0,PRIORITY_NORMAL,false); /* Now this thread is done. */ Exit(0); }","s":"1. 编写C语言代码","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#1-编写c语言代码","p":107},{"i":112,"t":"每一个项目的编译都在geekos-version/src/projecti/build文件夹下进行,即要在终端中通过cd进入该目录。 执行 make depend make 此时,该目录下会生成bochs.out、depend.mak以及fd.img文件,bochs.out文件是日志输出文件,depend.mak是编译中间生成的文件,最终生成的fd.img是最重要的GeekOS映像文件,有了它才能使用bochs运行GeekOS操作系统。感恩它! 目录下的文件应该是这样的结构: 下面就可以使用bochs运行GeekOS系统了,可以说bochs的运行依赖两个文件,一个是配置文件.bochsrc,一个是映像文件fd.img,映像文件的加载路径需要在.bochsrc文件中定义,在环境配置的博客中已经介绍过了。这里再贴一下内容。 # An example .bochsrc file. # You will need to edit these lines to reflect your system. vgaromimage: file=/usr/local/share/bochs/VGABIOS-lgpl-latest # 请根据自己的实际安装路径更改 romimage: file=/usr/local/share/bochs/BIOS-bochs-latest # 请根据自己的实际安装路径更改 megs: 8 boot: a floppya: 1_44=fd.img, status=inserted #floppya: 1_44=fd_aug.img, status=inserted log: ./bochs.out # keyboard_serial_delay: 200 # vga_update_interval: 300000 mouse: enabled=0 private_colormap: enabled=0 # i440fxsupport: enabled=0 # Uncomment this to write all bochs debugging messages to # bochs.out. This produces a lot of output, but can be very # useful for debugging the kernel. #debug: action=report 在这个目录下打开终端,执行 bochs 选择6,按下回车 可能会出现黑屏情况,这是因为进入了调试模式,终端正在等待命令,在终端输入 c 即可完成bochs的正式启动,最终的效果","s":"2. 使用Linux的编译系统对C语言代码进行编译","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#2-使用linux的编译系统对c语言代码进行编译","p":107},{"i":114,"t":"提示 欢迎来到笔记本的课程学习部分","s":"Welcome","u":"/docs/课程学习/intro","h":"","p":113},{"i":116,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/课程学习/intro","h":"#支持我","p":113},{"i":118,"t":"提示 Grateful for all the conveniences provided by Docusaurus! Grateful for Sonder's treasure trove of notebooks! 
Grateful for the blue sky and also the white clouds!","s":"鸣谢","u":"/docs/鸣谢/intro","h":"","p":117},{"i":120,"t":"提示 大数除法是指被除数大小超出long long范围,而导致必须使用字符串存储的除法,属于简单模拟的范畴","s":"大数除法","u":"/docs/推免/机试/大数除法","h":"","p":119},{"i":122,"t":"通过模拟列竖式手动计算除法,实现使用字符串存储被除数的大数除法","s":"思路","u":"/docs/推免/机试/大数除法","h":"#思路","p":119},{"i":124,"t":"string division(string s, int divisor) { /* * 通过模拟列竖式手算除法完成字符串存储的大数除法 */ string quotient; // 商 int idx = 0; // 当前处理的数字在原始字符串中的位置 int remainder = 0; // 余数 int temp = 0; while (idx < s.size()) { // 一直循环处理到索引等于长度 temp = remainder * 10 + (s[idx] - '0'); // 当前进行除法运算的temp if (temp >= divisor) { // 如果能除的动,则将当前的商插入quotient,并更新余数 quotient.push_back(temp / divisor + '0'); remainder = temp % divisor; } else { // 除不动时分两种情况 if (!quotient.empty()) { // 商目前不为空,此时按照竖式方法,需要向商中加入0,再接着下一次循环 quotient.push_back('0'); } remainder = temp; // 商目前为空,按照竖式计算方法,只更新余数,商保持为空 } idx++; // 更新索引位置 } if (quotient.empty()) { // 如果一直除不动,循环结束商还为空,则赋值为0字符串 quotient.assign(\"0\"); } return quotient; // 返回商字符串 }","s":"参考代码","u":"/docs/推免/机试/大数除法","h":"#参考代码","p":119},{"i":126,"t":"将大数除法与进制转换相结合。 提示 北京大学机试真题,N诺链接 完整代码如下: #include using namespace std; string division(string s, int divisor) { /* * 通过模拟列竖式手算除法完成字符串存储的大数除法 */ string quotient; // 商 int idx = 0; // 当前处理的数字在原始字符串中的位置 int remainder = 0; // 余数 int temp = 0; while (idx < s.size()) { // 一直循环处理到索引等于长度 temp = remainder * 10 + (s[idx] - '0'); // 当前进行除法运算的temp if (temp >= divisor) { // 如果能除的动,则将当前的商插入quotient,并更新余数 quotient.push_back(temp / divisor + '0'); remainder = temp % divisor; } else { // 除不动时分两种情况 if (!quotient.empty()) { // 商目前不为空,此时按照竖式方法,需要向商中加入0,再接着下一次循环 quotient.push_back('0'); } remainder = temp; // 商目前为空,按照竖式计算方法,只更新余数,商保持为空 } idx++; // 更新索引位置 } if (quotient.empty()) { // 如果一直除不动,循环结束商还为空,则赋值为0字符串 quotient.assign(\"0\"); } return quotient; // 返回商字符串 } int main() { string s; while (cin >> s) { vector vec; int len = s.size(); while (s != \"0\") { int remainder = (s[len - 1] - '0') % 2; vec.push_back(remainder); s = division(s, 2); len = s.size(); } if (vec.empty()) { cout << \"0\"; } else { for (auto it = vec.rbegin(); it != vec.rend(); it++) { cout << *it; } } cout << endl; } return 0; }","s":"扩展","u":"/docs/推免/机试/大数除法","h":"#扩展","p":119},{"i":129,"t":"树的性质: 一棵 N 个结点的树有 N-1 条边 树的总度数+1=树的结点数 树的度=树中度最大结点的度数 二叉树的性质: 叶子结点数等于度为 2 的结点数加 1,即n0 = n2 + 1 树转化为二叉树: 参考资料:知乎 加线。在所有的兄弟结点之间加一条线。 去线。树中的每个结点,只保留它与第一个孩子结点的连线,删除其他孩子结点之间的连线。 调整。每个结点的原来的孩子是结点的左孩子,由原来的兄弟结点转过来的孩子是结点的右孩子。 二叉排序树:每个结点的左子树上的所有结点值都更小,每个结点的右子树上的所有结点的值都更大。 平衡二叉排序树:要么是空树,要么左子树的高度与右子树的高度之差小于等于1。","s":"树","u":"/docs/推免/计算机基础综合/数据结构","h":"#树","p":127},{"i":131,"t":"图的表示: 邻接矩阵 邻接表:每一行表示的是一个顶点所连接的顶点,链表不具有指向性 邻接表的搜索 最小生成树:在连通网的所有生成树中,所有边的代价和最小的生成树,称为最小生成树。 Kruskal算法 Prim算法 最短路径 ​ ​","s":"图","u":"/docs/推免/计算机基础综合/数据结构","h":"#图","p":127},{"i":134,"t":"简述大数定理。 大数定理描述了大样本情况下随机变量的均值与其期望值之间的关系。对于独立同分布的随机变量序列,随着样本数量的增加,样本均值会以较高的概率接近其期望值。 简述中心极限定理。 当独立随机变量的数量足够大时,它们的和(或平均值)的分布会逐渐接近一个正态分布。即使原始随机变量不服从正态分布,但当样本容量足够大时,和(或平均值)的分布仍然呈现出正态分布的特征。 什么是全概率公式。 对于事件A而言,假设有一组互斥且穷尽的条件事件B,则事件A的概率等于事件A在每个条件事件下发生的概率与该条件事件发生概率的乘积和。 什么是最大似然估计。 基本思想是在已知观测数据的情况下,通过调整参数的取值,找到使得观测数据出现概率最大的参数值。 大致过程: 构建参数化的概率模型,即构建似然函数,表示在给定参数下观测数据出现的概率 取似然函数的对数,方便计算与优化 最大化似然函数,求解参数的最优值 简述贝叶斯定理。 贝叶斯定理描述了在给定观测数据的条件下,计算事件的后验概率的方法。 P(A∣B)=P(B∣A)∗P(A)P(B)P(A|B) = \\frac{P(B|A) * P(A)}{P(B)}P(A∣B)=P(B)P(B∣A)∗P(A)​ 其中: P(A∣B)P(A|B)P(A∣B)表示在观测到事件 B 发生的条件下,事件 A 发生的概率,称为后验概率 P(B∣A)P(B|A)P(B∣A)表示在事件 A 发生的条件下,事件 B 发生的概率,称为似然; P(A)P(A)P(A)和P(B)P(B)P(B)分别是事件 A 和事件 B 独立发生的先验概率。 优点:它能够将主观先验知识与观测数据相结合,通过不断更新后验概率来进行推断和决策。 P问题、NP问题以及NP完全问题 提示 P stands for 
Polynomial 意为多项式 P问题是可以在多项式时间内解决的问题 NP问题是可以在多项式时间内验证解的正确性的问题 NP完全问题是一类特殊的NP问题,没有已知的高效解决算法,并且可以在多项式时间内归约到任何其他的NP问题","s":"面试常考问题","u":"/docs/推免/数学/概率论","h":"#面试常考问题","p":132},{"i":138,"t":"显著性目标检测Salient Object Detection,相当于语义分割中的二分类任务,只有前景和背景","s":"(一)SOD任务","u":"/docs/推免/简历/简历面试准备","h":"#一sod任务","p":135},{"i":140,"t":"下图为U-2-Net的整体结构 提示 residual [rɪˈzɪdjuəl] 在encoder阶段,每个block之后使用maxpooling下采样两倍 在decoder阶段,每个block之后使用双线性插值上采样两倍 下图为Residual U-block的结构 提示 卷积是如何改变输出的通道数的? 卷积核的通道数等于输入的通道数,卷积核的个数等于输出的通道数 图片来源知乎 在特征融合阶段,每一层的encoder-decoder输出,使用3x3卷积以及双线性插值上采样到原始分辨率得到该层的特征图,且卷积核的个数为1,输出的feature map通道数也为1。将每一层的feature map进行concat拼接,得到6通道的融合feature map,最后使用1x1卷积以及sigmoid激活函数得到最终的融合特征图输出","s":"(二)网络结构","u":"/docs/推免/简历/简历面试准备","h":"#二网络结构","p":135},{"i":142,"t":"损失函数是7个损失项的加权求和 共有6层encoder-decoder结构,将每一层对应的feature map与ground truth做BCE Loss得到6个损失项 第7个损失项是最终融合得到的feature map与ground truth的BCE Loss 在论文中,每个损失项的权重都为1 canny边缘检测: 使用高斯滤波进行平滑 计算像素梯度 非极大值抑制 双阈值检测强边缘、弱边缘 边缘连接","s":"(三)损失函数","u":"/docs/推免/简历/简历面试准备","h":"#三损失函数","p":135},{"i":144,"t":"深度可分离卷积的优点是可以在大致保持卷积效果的情况下减少参数量 在实现原理上可分为两个步骤:深度卷积(depth wise)以及逐点(point wise)卷积 深度卷积是一种在每个输入通道上分别进行卷积操作的卷积方法,每个输入通道只与对应的卷积核进行卷积。 逐点卷积通过使用1×11 \\times 11×1卷积对深度卷积的结果再次卷积","s":"(四)深度可分离卷积","u":"/docs/推免/简历/简历面试准备","h":"#四深度可分离卷积","p":135},{"i":147,"t":"PR曲线所围成的面积即使该类的AP值","s":"(一)mAP","u":"/docs/推免/简历/简历面试准备","h":"#一map","p":135},{"i":149,"t":"提示 参考资料:【精读AI论文】YOLO V1目标检测,看我就够了 1.预测阶段​ 下图为YOLOv1的算法框架 下图为YOLOv1的网络结构 输入[448, 448, 3]图像,输出[7, 7, 30]的tensor(包含所有预测框的坐标、置信度和类别结果),通过解析输出的tensor得到预测结果 首先将输入图片划分为S×SS \\times SS×S个grid cell。在YOLOv1中S=7S=7S=7 每个grid cell预测出BBB个bounding box预测框(bbox),每个bbox的中心点都落在该grid cell中。在YOLOv1中B=2B=2B=2 每个bbox包含(x, y, h, w, c)五种信息,其中x, y为bbox左上角坐标,h, w为bbox的宽高,c为该bbox是否存在object的概率 同时每个grid cell预测出一组与数据集有关的条件类别概率。在YOLOv1论文使用的数据集Pascal VOC中,类别种类为20类,因此在预测阶段输出的[7, 7, 30]的tensor含义如下图所示 每个grid cell选出条件类别概率最大的类别,因此每个grid cell只能检测一个物体 提示 这也是YOLOv1小目标和密集目标识别能力差的原因 每个bbox的置信度与其父grid cell的类别概率相乘得到全概率,如下图所示 进行NMS后处理: 对某一特定类别,首先根据全概率置信度排序 将此时最大置信度的bbox与其他所有置信度更小的bbox做IoU判断,若IoU大于设置的阈值,则抹除置信度小的bbox 将剩余的次大的置信度重复步骤2,抹除所有置信度更小的其IoU超过阈值的bbox 提示 非极大值抑制只在预测阶段进行 在训练阶段,所有bbox都会在Loss Function中起到更新的作用,因此不进行NMS 2. 训练过程的损失函数​","s":"(二)YOLOv1","u":"/docs/推免/简历/简历面试准备","h":"#二yolov1","p":135},{"i":151,"t":"1. BN层​ 2. 高分辨率训练​ 3. Anchor​ YOLOv2引入了anchor机制代替bbox,将图像划分为13×1313 \\times 1313×13个grid cell,每个grid cell生成5个anchor anchor是通过k-means聚类在数据集上生成的不同尺寸的先验框 对数据集进行anchor宽高比的聚类,聚类数越大,覆盖的IoU越大,但同时模型也更复杂","s":"(二)YOLOv2","u":"/docs/推免/简历/简历面试准备","h":"#二yolov2","p":135},{"i":153,"t":"1. 特征融合​ YOLOv5使用CSPNet实现特征融合,CSP模块由主干和分支构成,主干提取低维特征,分支提取高维特征 主干通过卷积和池化提取特征,形成不同尺寸的特征图 分支将主干输出的特征图作为输入,逐步卷积和上采样提取高级别语义特征 主干特征图通过卷积对通道数降维之后与分支在通道维度上concat 提示 在特征提取以及融合阶段可以加入Canny边缘检测得到的特征图进行特征融合 2. 前处理​ 对填充黑色像素进行了改善,以填充更少的黑像素,提高了精度 3. 
特征金字塔FCN​","s":"(三)YOLOv5","u":"/docs/推免/简历/简历面试准备","h":"#三yolov5","p":135},{"i":155,"t":"::: 有关CSP特征融合可以参考:https://blog.csdn.net/weixin_55073640/article/details/122614176 ::: CBAM是通道+空间注意力机制(SENet是通道注意力机制)","s":"三、CBAM","u":"/docs/推免/简历/简历面试准备","h":"#三cbam","p":135},{"i":157,"t":"通道注意力:原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]经过通道注意力机制算法得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,代表不同通道之间的重要程度,将其与原始特征图相乘 空间注意力:经过通道注意力的特征图[b,c,h,w][b, c, h, w][b,c,h,w]经过空间注意力机制算法得到[b,1,h,w][b, 1, h, w][b,1,h,w]的tensor,代表宽高维度的像素之间的重要程度,将其与原始特征图相乘","s":"(一)总体结构","u":"/docs/推免/简历/简历面试准备","h":"#一总体结构","p":135},{"i":159,"t":"原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]分别经过最大池化和平均池化来压缩空间维度、学习通道之间的特征,得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,再送入共享的多层感知机网络进行降维再升维,最后将二者相加再经过sigmoid函数产生最终的通道注意力特征图","s":"(二)通道注意力","u":"/docs/推免/简历/简历面试准备","h":"#二通道注意力","p":135},{"i":161,"t":"原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]分别经过最大池化和平均池化(通过torch.max和torch.mean函数实现)得到[b,1,h,w][b, 1, h, w][b,1,h,w]的tensor,再将二者concat后通过7×77 \\times 77×7卷积学习特征并降维,最后送入sigmoid函数得到最终的空间注意力特征图","s":"(三)空间注意力","u":"/docs/推免/简历/简历面试准备","h":"#三空间注意力","p":135},{"i":163,"t":"作者分别对通道注意力以及空间注意力使用最大池化还是平均池化做了消融实验,结果反映二者都用最大池化以及平均池化再相加效果最好(且对于7×77 \\times 77×7卷积与3×33 \\times 33×3卷积的消融实验发现,7×77 \\times 77×7卷积效果更好) 作者对先通道注意力还是先空间注意力做了消融实验,结果发现先通道再空间效果更好","s":"(四)其他注意事项","u":"/docs/推免/简历/简历面试准备","h":"#四其他注意事项","p":135},{"i":165,"t":"Focal Loss通过引入修正项和样本关注度超参数,增加困难样本的关注度,来解决类别不均衡问题。 YOLO损失函数分为分类损失以及回归损失,可以在分类损失中引入Focal Loss代替原来的交叉熵损失","s":"四、Focal Loss","u":"/docs/推免/简历/简历面试准备","h":"#四focal-loss","p":135},{"i":167,"t":"Squeeze and Excitation Squeeze挤压操作就是将[b,c,h,w][b, c, h, w][b,c,h,w]的特征图通过池化挤压宽高维度,得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,该tensor还要经过所示的全连接层-ReLU-全连接层结构 Excitation激励操作就是通过sigmoid函数得到每个通道之间的重要程度系数","s":"五、SENet","u":"/docs/推免/简历/简历面试准备","h":"#五senet","p":135},{"i":169,"t":"自注意力机制通过计算元素之间的相似度来确定它们之间的关联性,并对其进行加权处理以获得上下文信息。 自注意力机制通过对输入的元素进行线性变换来得到查询(Query)向量、键(Key)向量和值(Value)向量。 通过点积和缩放点积计算相似程度 通过自注意力机制,每个元素都可以通过与其他元素的相似度计算和加权求和,获取到与它们相关的上下文信息。相似度高的元素将获得更高的权重,因此更受到关注和影响,从而建立起元素之间的关联性。","s":"六、自注意力机制","u":"/docs/推免/简历/简历面试准备","h":"#六自注意力机制","p":135},{"i":172,"t":"This content has been encrypted.","s":"(一)英文自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#一英文自我介绍","p":135},{"i":174,"t":"1. 英文自我介绍​ This content has been encrypted. 2. 中文自我介绍​ This content has been encrypted.","s":"(二)西电广研院自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#二西电广研院自我介绍","p":135},{"i":176,"t":"1. 英文自我介绍​ This content has been encrypted. 2. 
中文自我介绍​ This content has been encrypted.","s":"(三)电子科技大学自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#三电子科技大学自我介绍","p":135},{"i":179,"t":"线性相关与线性无关:向量组中的任一向量都不能被其它向量线性表示,就说向量组线性无关;否则就是线性相关。 矩阵转置:将矩阵的行和列互相交换 矩阵求逆:对于方阵A,若存在方阵B使得AB=BA=单位方阵I,则方阵B为方阵A的逆矩阵,记为A−1A^{-1}A−1 线性代数中的初等行变换。 交换两行 用非零常数乘以某一行 用一行的倍数加到另一行上 如何理解矩阵的秩。 矩阵的秩是指矩阵的列空间(或行空间)的维数,简而言之是矩阵中所有非零行(或列)向量构成的集合所组成的最大线性无关组的向量个数。 提示 宋浩八字:非零子式的最高阶数 任意矩阵的行秩都等于列秩。 矩阵的秩与线性方程组解的关系。 对于n元线性方程组而言: 当系数矩阵的秩等于增广矩阵的秩且秩等于n时,有唯一解 当系数矩阵的秩等于增广矩阵的秩且秩大于n时,有无穷多解 当系数矩阵的秩不等于增广矩阵的秩时,无解 提示 当系数矩阵的秩小于增广矩阵的秩时,说明系数矩阵中的某一列向量(或行向量)可以被其他列向量(或行向量)线性表示,此时该行不能提供额外的线性独立信息 简述向量组线性无关的含义。 含义:若一个向量组是线性无关的,则该向量组中的每个向量都不能表示成其他向量的线性组合。 意义:如果一个向量组线性无关,那么该向量组所张成的空间就是一个最小维度的向量空间,并且该向量空间中的任何向量都可由这些向量线性组合表示。 判定方法:如果一个向量组中的所有向量都不可以由其他向量线性组合得到,则称该向量组为线性无关的。否则,如果存在某个向量可以表示成其他向量的线性组合,则该向量组就不是线性无关的。 解释正定矩阵以及半正定矩阵。 简述特征值的含义。 特征值描述了矩阵在特定方向(特征向量方向)上的缩放因子,特征向量表示矩阵在这个特定方向上的不变性。 简述矩阵分解的物理意义。 矩阵分解是将一个矩阵表示为一些特定形式的矩阵乘积的过程。 矩阵分解的种类以及物理意义: LU分解:将矩阵分解为一个下三角矩阵和一个上三角矩阵的乘积。物理意义包括解线性方程组、计算矩阵的行列式和逆矩阵等。 QR分解:将矩阵分解为一个正交矩阵和一个上三角矩阵的乘积。物理意义包括最小二乘问题、矩阵的特征值计算等。 特征值分解:将矩阵分解为一个特征向量矩阵和一个对角矩阵的乘积。物理意义包括矩阵的幂、指数和对称矩阵的对角化等。 奇异值分解(SVD):将矩阵分解为一个正交矩阵、一个对角矩阵和一个正交矩阵的乘积。物理意义包括降维、矩阵逼近和图像压缩等。","s":"一、线性代数","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#一线性代数","p":177},{"i":181,"t":"简述大数定理。 大数定理描述了大样本情况下随机变量的均值与其期望值之间的关系。对于独立同分布的随机变量序列,随着样本数量的增加,样本均值会以较高的概率接近其期望值。 简述中心极限定理。 当独立随机变量的数量足够大时,它们的和(或平均值)的分布会逐渐接近一个正态分布。即使原始随机变量不服从正态分布,但当样本容量足够大时,和(或平均值)的分布仍然呈现出正态分布的特征。 什么是全概率公式。 对于事件A而言,假设有一组互斥且穷尽的条件事件B,则事件A的概率等于事件A在每个条件事件下发生的概率与该条件事件发生概率的乘积和。 什么是最大似然估计。 基本思想是在已知观测数据的情况下,通过调整参数的取值,找到使得观测数据出现概率最大的参数值。 大致过程: 构建参数化的概率模型,即构建似然函数,表示在给定参数下观测数据出现的概率 取似然函数的对数,方便计算与优化 最大化似然函数,求解参数的最优值 简述贝叶斯定理。 贝叶斯定理描述了在给定观测数据的条件下,计算事件的后验概率的方法。 P(A∣B)=P(B∣A)∗P(A)P(B)P(A|B) = \\frac{P(B|A) * P(A)}{P(B)}P(A∣B)=P(B)P(B∣A)∗P(A)​ 其中: P(A∣B)P(A|B)P(A∣B)表示在观测到事件 B 发生的条件下,事件 A 发生的概率,称为后验概率 P(B∣A)P(B|A)P(B∣A)表示在事件 A 发生的条件下,事件 B 发生的概率,称为似然; P(A)P(A)P(A)和P(B)P(B)P(B)分别是事件 A 和事件 B 独立发生的先验概率。 优点:它能够将主观先验知识与观测数据相结合,通过不断更新后验概率来进行推断和决策。 P问题、NP问题以及NP完全问题 提示 P stands for Polynomial 意为多项式 P问题是可以在多项式时间内解决的问题 NP问题是可以在多项式时间内验证解的正确性的问题 NP完全问题是一类特殊的NP问题,没有已知的高效解决算法,并且可以在多项式时间内归约到任何其他的NP问题","s":"二、概率论","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#二概率论","p":177},{"i":183,"t":"提示 欢迎来到笔记本的推免复习部分","s":"Welcome","u":"/docs/推免/intro","h":"","p":182},{"i":185,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/推免/intro","h":"#支持我","p":182},{"i":187,"t":"提示 参考链接: 线性代数极简入门 《线性代数》高清教学视频 “惊叹号”系列 宋浩老师","s":"线性代数","u":"/docs/推免/数学/线性代数","h":"","p":186},{"i":189,"t":"线性相关与线性无关:向量组中的任一向量都不能被其它向量线性表示,就说向量组线性无关;否则就是线性相关。 矩阵转置:将矩阵的行和列互相交换 矩阵求逆:对于方阵A,若存在方阵B使得AB=BA=单位方阵I,则方阵B为方阵A的逆矩阵,记为A−1A^{-1}A−1","s":"一、基础知识","u":"/docs/推免/数学/线性代数","h":"#一基础知识","p":186},{"i":191,"t":"线性代数中的初等行变换。 交换两行 用非零常数乘以某一行 用一行的倍数加到另一行上 如何理解矩阵的秩。 矩阵的秩是指矩阵的列空间(或行空间)的维数,简而言之是矩阵中所有非零行(或列)向量构成的集合所组成的最大线性无关组的向量个数。 提示 宋浩八字:非零子式的最高阶数 任意矩阵的行秩都等于列秩。 矩阵的秩与线性方程组解的关系。 对于n元线性方程组而言: 当系数矩阵的秩等于增广矩阵的秩且秩等于n时,有唯一解 当系数矩阵的秩等于增广矩阵的秩且秩大于n时,有无穷多解 当系数矩阵的秩不等于增广矩阵的秩时,无解 提示 当系数矩阵的秩小于增广矩阵的秩时,说明系数矩阵中的某一列向量(或行向量)可以被其他列向量(或行向量)线性表示,此时该行不能提供额外的线性独立信息 简述向量组线性无关的含义。 含义:若一个向量组是线性无关的,则该向量组中的每个向量都不能表示成其他向量的线性组合。 意义:如果一个向量组线性无关,那么该向量组所张成的空间就是一个最小维度的向量空间,并且该向量空间中的任何向量都可由这些向量线性组合表示。 判定方法:如果一个向量组中的所有向量都不可以由其他向量线性组合得到,则称该向量组为线性无关的。否则,如果存在某个向量可以表示成其他向量的线性组合,则该向量组就不是线性无关的。 解释正定矩阵以及半正定矩阵。 简述特征值的含义。 特征值描述了矩阵在特定方向(特征向量方向)上的缩放因子,特征向量表示矩阵在这个特定方向上的不变性。 简述矩阵分解的物理意义。 矩阵分解是将一个矩阵表示为一些特定形式的矩阵乘积的过程。 矩阵分解的种类以及物理意义: LU分解:将矩阵分解为一个下三角矩阵和一个上三角矩阵的乘积。物理意义包括解线性方程组、计算矩阵的行列式和逆矩阵等。 QR分解:将矩阵分解为一个正交矩阵和一个上三角矩阵的乘积。物理意义包括最小二乘问题、矩阵的特征值计算等。 特征值分解:将矩阵分解为一个特征向量矩阵和一个对角矩阵的乘积。物理意义包括矩阵的幂、指数和对称矩阵的对角化等。 
奇异值分解(SVD):将矩阵分解为一个正交矩阵、一个对角矩阵和一个正交矩阵的乘积。物理意义包括降维、矩阵逼近和图像压缩等。","s":"二、面试常考问题","u":"/docs/推免/数学/线性代数","h":"#二面试常考问题","p":186},{"i":193,"t":"提示 设N是一个四位数,它的9倍恰好是其反序数(例如:1234的反序数是4321),求N的值","s":"反序输出","u":"/docs/Algorithms/题解/反序输出","h":"","p":192},{"i":195,"t":"#include using namespace std; int main() { for (int i = 1000; i <= 9999; i++) { int x = i * 9, y = 0; while (x > 0) { y = y * 10 + x % 10; x /= 10; } if (i == y) { cout << i << endl; } } return 0; }","s":"参考代码","u":"/docs/Algorithms/题解/反序输出","h":"#参考代码","p":192},{"i":197,"t":"反序输出可以分为两部分:拆分以及反序拼接 拆分:n位整数求余10可以得到最后一位,再除以10可以得到除去上述最后一位之后的n-1位整数,循环得到每一个最后一位,完成拆分 while (x > 0) { y = y * 10 + x % 10; // 拼接与拆分 x /= 10; } 拼接:将s中的数字拼接成整数 int sum = 0; for (int i = 0; i < s.size(); i++) { sum = sum * 10 + s[i]; }","s":"题解","u":"/docs/Algorithms/题解/反序输出","h":"#题解","p":192},{"i":199,"t":"提示 欢迎来到笔记本的算法部分","s":"Welcome","u":"/docs/Algorithms/intro","h":"","p":198},{"i":201,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Algorithms/intro","h":"#支持我","p":198},{"i":203,"t":"提示 在一面很长的墙壁上,工人们用不同的油漆去刷墙,然而可能有些地方刷过以后觉得不好看,他们会重新刷一下。有些部分因为重复刷了很多次覆盖了很多层油漆,小诺很好奇那些地方被刷过多少种颜色的油漆。 输入描述: 若干行输入,每行两个数字B[i],E[i](0<=B[i]<=E[i]<=200000)表示这次刷的墙壁是哪一段 (假设每次刷的时候油漆颜色都和之前的不同),以0 0结束 又若干行输入,每行两个数字begin[i],end[i](0<=begin[i]<=end[i]<=200000)表示小诺询问的段, 以0 0结束 输出描述: 对于每个小诺的询问输出(end[i]-begin[i]+1)行,表示对应询问段的每个点被多少种颜色的油漆覆盖过。","s":"一维前缀和(刷出一道墙)","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"","p":202},{"i":205,"t":"#include using namespace std; int main() { vector colors(200001, 0); int B, E; while (scanf(\"%d %d\", &B, &E)) { if (B == 0 && E == 0) { break; } colors[B]++; // 刷墙起点标记 colors[E + 1]--; // 刷墙终点标记 } // 计算前缀和 for (int i = 1; i < colors.size(); i++) { colors[i] += colors[i - 1]; } int begin, end; while (scanf(\"%d %d\", &begin, &end)) { if (begin == 0 && end == 0) { break; } for (int i = begin; i <= end; i++) { printf(\"%d\\n\", colors[i]); } } return 0; }","s":"参考代码","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#参考代码","p":202},{"i":207,"t":"使用前缀和思想简化时间复杂度,设计前缀和数组,使输出的数组中元素的值代表其对应节点被刷的次数。 首先初始化前缀和数组,使每一个元素等于为0。 该题的巧妙之处就在于:对于每一个输入的索引B与E,B作为开始刷的节点索引令前缀和数组中对应元素的值+1+1+1,E+1作为刷墙结束的下一个节点的索引令对应的值−1-1−1。这样在所有输入结束后的计算前缀和阶段,在每一个值为[1,−1)[1, -1)[1,−1)的索引区间中的元素值都会加1,而对于某次刷漆终点E的下一个索引为E+1的元素值由于−1-1−1而抵消影响(自身值为−1-1−1加上之前元素所累积的1而归零),此时数组中元素的值才代表其对应节点被刷的次数。 关于超时,可以在函数中加入以下代码消除流操作的缓冲区,并使用\"\\n\"代替endl。 ios::sync_with_stdio(false);","s":"题解","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#题解","p":202},{"i":209,"t":"提示 输入一个数,比如201,让数字随意组合,是否能组合出30的倍数,如果能够组合成30的倍数,就输出最大的倍数,不能就输出-1 例如输入201可以随意组合成 201,210,012,021,102,120等数字 其中120,210都是30的倍数,由于要找最大的,所以答案是210 输入样例:201 输出样例:210","s":"排列组合(求30的倍数)","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"","p":208},{"i":211,"t":"#include using namespace std; int main() { string s; cin >> s; int maxx = 0, flag = 0; sort(s.begin(), s.end()); do { int now = 0; for (int i = 0; i < s.size(); i++) { now = now * 10 + s[i] - '0'; } if (now % 30 == 0) { flag = 1; maxx = max(maxx, now); } } while (next_permutation(s.begin(), s.end())); if (flag == 1) { cout << maxx << endl; return 0; } else { cout << -1 << endl; } }","s":"参考代码","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#参考代码","p":208},{"i":213,"t":"使用C++ STL提供的排列组合模版 首先将代排列组合的字符串或数组进行排序 sort(list.begin(), list.end()); 使用排列组合模版 do { something(); } while (next_permutation(list.begin(), list.end())); 此时,在每一个do循环中,list按从小到大的顺序进行排列组合遍历","s":"题解","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#题解","p":208},{"i":215,"t":"[TOC]","s":"机试技巧与STL","u":"/docs/Algorithms/机试技巧与STL","h":"","p":214},{"i":217,"t":"CTRL + J 列出成员 Ctrl+E,D 格式化全部代码 
Ctrl+K,F 格式化选中的代码 CTRL + SHIFT + E 显示资源视图 F12 转到定义 CTRL + F12 转到声明 CTRL + ALT + J 对象浏览 CTRL + ALT + F1 帮助目录 CTRL + F1 动态帮助 CTRL + K, CTRL + C 注释选择的代码 CTRL + K, CTRL + U 取消对选择代码的注释 CTRL + U 转小写 CTRL + SHIFT + U 转大写 F5 运行调试 CTRL + F5 运行不调试 F10 跨过程序执行 F11 单步逐句执行","s":"vs2018 快捷键","u":"/docs/Algorithms/机试技巧与STL","h":"#vs2018-快捷键","p":214},{"i":220,"t":"头文件 说明 头文件 说明 头文件 说明 assert.h 断言相关 ctype.h 字符类型判断 errno.h 标准错误机制 float.h 浮点限制 limits.h 整形限制 locale.h 本地化接口 math.h 数学函数 setjmp.h 非本地跳转 signal.h 信号相关 stdarg.h 可变参数处理 stddef.h 宏和类型定义 stdio.h 标准I/O stdlib.h 标准工具库 string.h 字符串和内存处理 time.h 时间相关","s":"标准c库","u":"/docs/Algorithms/机试技巧与STL","h":"#标准c库","p":214},{"i":222,"t":"using namespace std; 头文件 说明 头文件 说明 头文件 说明 algorithm 通用算法 deque 双端队列 vector 向量 iterator 迭代器 stack 栈 map 图(键值对) list 列表 string 字符串 set 集合 queue 队列 bitset bit类 numeric 数值算法","s":"c++ STL","u":"/docs/Algorithms/机试技巧与STL","h":"#c-stl","p":214},{"i":224,"t":"#include #include #include #include #include #include #include #include #include #include #include #include #include #include #include using namespace std;","s":"常用头","u":"/docs/Algorithms/机试技巧与STL","h":"#常用头","p":214},{"i":226,"t":"//求最大值和最小值 #define MAX(x,y) (((x)>(y)) ? (x) : (y)) #define MIN(x,y) (((x) < (y)) ? (x) : (y)) //取余 #define mod(x) ((x)%MOD) //for循环 #define FOR(i,f_start,f_end) for(int i=f_start;i<=f_end;++i) //返回数组元素的个数 #define ARR_SIZE(a) (sizeof((a))/sizeof((a[0]))) //初始化数组 #define MT(x,i) memset(x,i,sizeof(x)) #define MEM(a,b) memset((a),(b),sizeof(a)) //符号重定义 #define LL long long #define ull unsigned long long #define pii pair //常见常数 #define PI acos(-1.0) #define eps 1e-12 #define INF 0x3f3f3f3f //int最大值 const int INF_INT = 2147483647; const ll INF_LL = 9223372036854775807LL; const ull INF_ULL = 18446744073709551615Ull; const ll P = 92540646808111039LL; const ll maxn = 1e5 + 10, MOD = 1e9 + 7; const int Move[4][2] = {-1,0,1,0,0,1,0,-1}; const int Move_[8][2] = {-1,-1,-1,0,-1,1,0,-1,0,1,1,-1,1,0,1,1};","s":"常用宏定义","u":"/docs/Algorithms/机试技巧与STL","h":"#常用宏定义","p":214},{"i":229,"t":"struct InitMember { int first; double second; char* third; float four; };","s":"定义","u":"/docs/Algorithms/机试技巧与STL","h":"#定义","p":214},{"i":231,"t":"方法一:定义时赋值​ struct InitMember test = {-10,3.141590,\"method one\",0.25}; 方法二:定义后逐个赋值​ struct InitMember test; test.first = -10; test.second = 3.141590; test.third = \"method two\"; test.four = 0.25; 方法三:定义时乱序赋值(C++风格)​ struct InitMember test = { second:3.141590, third:\"method three\", first:-10, four:0.25 }; 方法四:构造函数​ //定义图的定点 typedef struct Vertex { int id,inDegree,outDegree; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1),inDegree(0),outDegree(0) {} Vertex(int nid) : id(nid),inDegree(0),outDegree(0) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } Graph() : nVertexs(1), isDAG(1) { vertexs.resize(1); } //向图中添加边 bool addEdge(int id1, int id2) { ... ... ... 
return true; } } Graph; Graph g(8, false);","s":"初始化","u":"/docs/Algorithms/机试技巧与STL","h":"#初始化","p":214},{"i":233,"t":"typedef struct{int id;int h;} node; bool operator <(const node& a,const node & b){return (a.h)<(b.h);}","s":"运算符重载","u":"/docs/Algorithms/机试技巧与STL","h":"#运算符重载","p":214},{"i":236,"t":"int *x = new int; //开辟一个存放整数的存储空间,返回一个指向该存储空间的地址(即指针) int *a = new int(100); //开辟一个存放整数的空间,并指定该整数的初值为100,返回一个指向该存储空间的地址 char *b = new char[10]; //开辟一个存放字符数组(包括10个元素)的空间,返回首元素的地址 float *p=new float (3.14159);//开辟一个存放单精度数的空间,并指定该实数的初值为//3.14159,将返回的该空间的地址赋给指针变量p","s":"常规","u":"/docs/Algorithms/机试技巧与STL","h":"#常规","p":214},{"i":238,"t":"//列值固定 const int MAXCOL = 3; cin>>row; //申请一维数据并将其转成二维数组指针 int *pp_arr = new int[nRow * MAXCOL]; int (*p)[MAXCOL] = (int(*)[MAXCOL])pp_arr; //此时p[i][j]就可正常使用","s":"动态申请列大小固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请列大小固定的二维数组","p":214},{"i":240,"t":"cin>>row>>col; int **p = new int*[row]; for (int i = 0; i < row; i ++) { p[i] = new int[col]; }","s":"动态申请大小不固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请大小不固定的二维数组","p":214},{"i":242,"t":"参考: https://blog.csdn.net/f_zyj/article/details/51594851 https://download.csdn.net/download/f_zyj/9988653","s":"常用STL","u":"/docs/Algorithms/机试技巧与STL","h":"#常用stl","p":214},{"i":244,"t":"STL底层说明​ C++ STL 的实现: 1.vector 底层数据结构为数组 ,支持快速随机访问 2.list 底层数据结构为双向链表,支持快速增删 3.deque 底层数据结构为一个中央控制器和多个缓冲区,详细见STL源码剖析P146,支持首尾(中间不能)快速增删,也支持随机访问 deque是一个双端队列(double-ended queue),也是在堆中保存内容的.它的保存形式如下: [堆1] --> [堆2] -->[堆3] --> ... 每个堆保存好几个元素,然后堆和堆之间有指针指向,看起来像是list和vector的结合品. 4.stack 底层一般用list或deque实现,封闭头部即可,不用vector的原因应该是容量大小有限制,扩容耗时 5.queue 底层一般用list或deque实现,封闭头部即可,不用vector的原因应该是容量大小有限制,扩容耗时 (stack和queue其实是适配器,而不叫容器,因为是对容器的再封装) 6.priority_queue 的底层数据结构一般为vector为底层容器,堆heap为处理规则来管理底层容器实现 7.set 底层数据结构为红黑树,有序,不重复 8.multiset 底层数据结构为红黑树,有序,可重复 9.map 底层数据结构为红黑树,有序,不重复 10.multimap 底层数据结构为红黑树,有序,可重复 11.hash_set 底层数据结构为hash表,无序,不重复 12.hash_multiset 底层数据结构为hash表,无序,可重复 13.hash_map 底层数据结构为hash表,无序,不重复 14.hash_multimap 底层数据结构为hash表,无序,可重复 CCF 编译出错原因: 不允许C++STL容器嵌套(需要满足相应的格式)​ 就是要在后面的“>”之间,必须得有一个空格,如果有多层,那每层都得有一个空格。 map > user;","s":"简述","u":"/docs/Algorithms/机试技巧与STL","h":"#简述","p":214},{"i":246,"t":"头文件:lgorithm 函数参数,返回值以及具体的使用方法请自行去头文件找定义!!! 
不修改内容的序列操作​ 函数 说明 adjacent_find 查找两个相邻(Adjacent)的等价(Identical)元素 all_ofC++11 检测在给定范围中是否所有元素都满足给定的条件 any_ofC++11 检测在给定范围中是否存在元素满足给定条件 count 返回值等价于给定值的元素的个数 count_if 返回值满足给定条件的元素的个数 equal 返回两个范围是否相等 find 返回第一个值等价于给定值的元素 find_end 查找范围A中与范围B等价的子范围最后出现的位置 find_first_of 查找范围A中第一个与范围B中任一元素等价的元素的位置 find_if 返回第一个值满足给定条件的元素 find_if_notC++11 返回第一个值不满足给定条件的元素 for_each 对范围中的每个元素调用指定函数 mismatch 返回两个范围中第一个元素不等价的位置 none_ofC++11 检测在给定范围中是否不存在元素满足给定的条件 search 在范围A中查找第一个与范围B等价的子范围的位置 search_n 在给定范围中查找第一个连续n个元素都等价于给定值的子范围的位置 修改内容的序列操作​ 函数 说明 copy 将一个范围中的元素拷贝到新的位置处 copy_backward 将一个范围中的元素按逆序拷贝到新的位置处 copy_ifC++11 将一个范围中满足给定条件的元素拷贝到新的位置处 copy_nC++11 拷贝 n 个元素到新的位置处 fill 将一个范围的元素赋值为给定值 fill_n 将某个位置开始的 n 个元素赋值为给定值 generate 将一个函数的执行结果保存到指定范围的元素中,用于批量赋值范围中的元素 generate_n 将一个函数的执行结果保存到指定位置开始的 n 个元素中 iter_swap 交换两个迭代器(Iterator)指向的元素 moveC++11 将一个范围中的元素移动到新的位置处 move_backwardC++11 将一个范围中的元素按逆序移动到新的位置处 random_shuffle 随机打乱指定范围中的元素的位置 remove 将一个范围中值等价于给定值的元素删除 remove_if 将一个范围中值满足给定条件的元素删除 remove_copy 拷贝一个范围的元素,将其中值等价于给定值的元素删除 remove_copy_if 拷贝一个范围的元素,将其中值满足给定条件的元素删除 replace 将一个范围中值等价于给定值的元素赋值为新的值 replace_copy 拷贝一个范围的元素,将其中值等价于给定值的元素赋值为新的值 replace_copy_if 拷贝一个范围的元素,将其中值满足给定条件的元素赋值为新的值 replace_if 将一个范围中值满足给定条件的元素赋值为新的值 reverse 反转排序指定范围中的元素 reverse_copy 拷贝指定范围的反转排序结果 rotate 循环移动指定范围中的元素 rotate_copy 拷贝指定范围的循环移动结果 shuffleC++11 用指定的随机数引擎随机打乱指定范围中的元素的位置 swap 交换两个对象的值 swap_ranges 交换两个范围的元素 transform 对指定范围中的每个元素调用某个函数以改变元素的值 unique 删除指定范围中的所有连续重复元素,仅仅留下每组等值元素中的第一个元素。 unique_copy 拷贝指定范围的唯一化(参考上述的 unique)结果 划分操作​ 函数 说明 is_partitionedC++11 检测某个范围是否按指定谓词(Predicate)划分过 partition 将某个范围划分为两组 partition_copyC++11 拷贝指定范围的划分结果 partition_pointC++11 返回被划分范围的划分点 stable_partition 稳定划分,两组元素各维持相对顺序 排序操作​ 函数 说明 is_sortedC++11 检测指定范围是否已排序 is_sorted_untilC++11 返回最大已排序子范围 nth_element 部份排序指定范围中的元素,使得范围按给定位置处的元素划分 partial_sort 部份排序 partial_sort_copy 拷贝部分排序的结果 sort 排序 stable_sort 稳定排序 二分法查找操作​ 函数 说明 binary_search 判断范围中是否存在值等价于给定值的元素 equal_range 返回范围中值等于给定值的元素组成的子范围 lower_bound 返回指向范围中第一个值大于或等于给定值的元素的迭代器 upper_bound 返回指向范围中第一个值大于给定值的元素的迭代器 集合操作​ 函数 说明 includes 判断一个集合是否是另一个集合的子集 inplace_merge 就绪合并 merge 合并 set_difference 获得两个集合的差集 set_intersection 获得两个集合的交集 set_symmetric_difference 获得两个集合的对称差 set_union 获得两个集合的并集 堆操作​ 函数 说明 is_heap 检测给定范围是否满足堆结构 is_heap_untilC++11 检测给定范围中满足堆结构的最大子范围 make_heap 用给定范围构造出一个堆 pop_heap 从一个堆中删除最大的元素 push_heap 向堆中增加一个元素 sort_heap 将满足堆结构的范围排序 最大/最小操作​ 函数 说明 is_permutationC++11 判断一个序列是否是另一个序列的一种排序 lexicographical_compare 比较两个序列的字典序 max 返回两个元素中值最大的元素 max_element 返回给定范围中值最大的元素 min 返回两个元素中值最小的元素 min_element 返回给定范围中值最小的元素 minmaxC++11 返回两个元素中值最大及最小的元素 minmax_elementC++11 返回给定范围中值最大及最小的元素 next_permutation 返回给定范围中的元素组成的下一个按字典序的排列 prev_permutation 返回给定范围中的元素组成的上一个按字典序的排列","s":"algorithm","u":"/docs/Algorithms/机试技巧与STL","h":"#algorithm","p":214},{"i":248,"t":"头文件:vector 在STL的vector头文件中定义了vector(向量容器模版类),vector容器以连续数组的方式存储元素序列,可以将vector看作是以顺序结构实现的线性表。当我们在程序中需要使用动态数组时,vector将会是理想的选择,vector可以在使用过程中动态地增长存储空间。 vector模版类需要两个模版参数,第一个参数是存储元素的数据类型,第二个参数是存储分配器的类型,其中第二个参数是可选的,如果不给出第二个参数,将使用默认的分配器 下面给出几个常用的定义vector向量对象的方法示例: vector s; // 定义一个空的vector对象,存储的是int类型的元素 vector s(n); // 定义一个含有n个int元素的vector对象 vector s(first, last); // 定义一个vector对象,并从由迭代器first和last定义的序列[first, last)中复制初值 vector的基本操作: s[i] // 直接以下标方式访问容器中的元素 s.front() // 返回首元素 s.back() // 返回尾元素 s.push_back(x) // 向表尾插入元素x s.size() // 返回表长 s.empty() // 表为空时,返回真,否则返回假 s.pop_back() // 删除表尾元素 s.begin() // 返回指向首元素的随机存取迭代器 s.end() // 返回指向尾元素的下一个位置的随机存取迭代器 s.insert(it, val) // 向迭代器it指向的元素前插入新元素val s.insert(it, n, val)// 向迭代器it指向的元素前插入n个新元素val s.insert(it, first, last) // 将由迭代器first和last所指定的序列[first, 
last)插入到迭代器it指向的元素前面 s.erase(it) // 删除由迭代器it所指向的元素 s.erase(first, last)// 删除由迭代器first和last所指定的序列[first, last) s.reserve(n) // 预分配缓冲空间,使存储空间至少可容纳n个元素 s.resize(n) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),元素默认值将填满扩展出的空间 s.resize(n, val) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),val将填满扩展出的空间 s.clear() // 删除容器中的所有元素 s.swap(v) // 将s与另一个vector对象进行交换 s.assign(first, last) // 将序列替换成由迭代器first和last所指定的序列[first, last),[first, last)不能是原序列中的一部分 // 要注意的是,resize操作和clear操作都是对表的有效元素进行的操作,但并不一定会改变缓冲空间的大小 // 另外,vector还有其他的一些操作,如反转、取反等,不再一一列举 // vector上还定义了序列之间的比较操作运算符(>、<、>=、<=、==、!=),可以按照字典序比较两个序列。 // 还是来看一些示例代码吧…… /* * 输入个数不定的一组整数,再将这组整数按倒序输出 */ #include #include using namespace std; int main() { vector L; int x; while(cin >> x) { L.push_back(x); } for (int i = L.size() - 1; i >= 0; i--) { cout << L[i] << \" \"; } cout << endl; return 0; }","s":"vector","u":"/docs/Algorithms/机试技巧与STL","h":"#vector","p":214},{"i":250,"t":"头文件:list 下面给出几个常用的定义list对象的方法示例: lista{1,2,3} lista(n) //声明一个n个元素的列表,每个元素都是0 lista(n, m) //声明一个n个元素的列表,每个元素都是m lista(first, last) //声明一个列表,其元素的初始值来源于由区间所指定的序列中的元素,first和last是迭代器 list的基本操作: a.begin() // 返回指向首元素的随机存取迭代器 a.end() // 返回指向尾元素的下一个位置的随机存取迭代器 a.push_front(x) // 向表头插入元素x a.push_back(x) // 向表尾插入元素x a.pop_back() // 删除表尾元素 a.pop_front() // 删除表头元素 a.size() // 返回表长 a.empty() // 表为空时,返回真,否则返回假 a.resize(n) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),元素默认值将填满扩展出的空间 a.resize(n, val) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),val将填满扩展出的空间 a.clear() // 删除容器中的所有元素 a.front() // 返回首元素 a.back() // 返回尾元素 a.swap(v) // 将a与另一个list对象进行交换 a.merge(b) // 调用结束后b变为空,a中元素包含原来a和b的元素 a.insert(it, val) // 向迭代器it指向的元素前插入新元素val a.insert(it, n, val)// 向迭代器it指向的元素前插入n个新元素val a.insert(it, first, last) // 将由迭代器first和last所指定的序列[first, last)插入到迭代器it指向的元素前面 a.erase(it) // 删除由迭代器it所指向的元素 a.erase(first, last)// 删除由迭代器first和last所指定的序列[first, last) a.remove(x) // 删除了a中所有值为x的元素 a.assign(n, val) // 将a中的所有元素替换成n个val元素 a.assign(b.begin(), b.end()) //将a变成b","s":"list","u":"/docs/Algorithms/机试技巧与STL","h":"#list","p":214},{"i":252,"t":"头文件:string string是STL的字符串类型,通常用来表示字符串。而在使用string之前,字符串通常是用char*表示的。 string和char*的区别 string是一个类, char*是一个指向字符的指针。 string封装了char*,管理这个字符串,是一个char*型的容器。也就是说string是一个容器,里面元素的数据类型是char*。 string不用考虑内存释放和越界。 string管理char*所分配的内存。每一次string的复制,取值都由string类负责维护,不用担心复制越界和取值越界等。 string提供了一系列的字符串操作函数 查找find,拷贝copy,删除erase,替换replace,插入insert. 
构造和析构函数: 表达式 效果 string s 生成一个空字符串 string s(str) copy构造函数,生成一个str的复制品 string s(str,idx) 将string内始于位置idx的部分当作字符串s的初值 string s(str,idx,len) 将string内始于位置idx且长度最多为len的部分当作字符串s的初值 string s(cstr) 以C-string字符串cstr作为字符串s的初值 string s(cstr,len) 以C-string字符串cstr的前len个字符作为字符串s的初值 string s(num,c) 生成一个字符串,包含num个字符c string s(beg,end) 以区间[beg,end]内所有字符作为字符串s的初值 操作函数: 操作函数 效果 =,assign() 赋以新值 swap() 交换两个字符串的内容 +=, append(),push_back() 添加字符 insert() 插入字符 erase() 删除字符 clear() 移除全部字符 resize() 改变字符数量 replace() 替换字符 + 串联字符串 ==,!=,<,<=,>,>=,compare() 比较字符串内容 size(),length() 返回字符数量,等效函数 max_size() 返回字符的最大可能个数 empty() 判断字符串是否为空 capacity() 返回重新分配之前的字符容量 reserve() 保留一定量内存以容纳一定数量的字符 [ ],at() 存取单一字符 >>,getline() 从stream中读取某值 << 将某值写入stream copy() 将内容复制为一个C-string c_str() 将内容以C-string形式返回 data() 将内容以字符数组形式返回 substr() 返回某个子字符串 begin(),end() 提供正常的迭代器支持 rbegin(),rend() 提供逆向迭代器支持","s":"string","u":"/docs/Algorithms/机试技巧与STL","h":"#string","p":214},{"i":254,"t":"头文件:utility STL的utility头文件中描述了一个看上去非常简单的模版类pair,用来表示一个二元组或元素对,并提供了按照字典序对元素对进行大小比较运算符模版函数。 Example,想要定义一个对象表示一个平面坐标点,则可以: pair p; cin >> p.first >> p.second; pair模版类需要两个参数:首元素的数据类型和尾元素的数据类型。pair模版类对象有两个成员:first和second,分别表示首元素和尾元素。 在其中已经定义了pair上的六个比较运算符:<、>、<=、>=、==、!=,其规则是先比较first,first相等时再比较second,这符合大多数应用的逻辑。当然,也可以通过重载这几个运算符来重新指定自己的比较逻辑。 除了直接定义一个pair对象外,如果需要即时生成一个pair对象,也可以调用在其中定义的一个模版函数:make_pair。make_pair需要两个参数,分别为元素对的首元素和尾元素。","s":"pair","u":"/docs/Algorithms/机试技巧与STL","h":"#pair","p":214},{"i":256,"t":"头文件:map 在STL的头文件中map中定义了模版类map和multimap,用有序二叉树表存储类型为pair的元素对序列。序列中的元素以const Key部分作为标识,map中所有元素的Key值必须是唯一的,multimap则允许有重复的Key值。 可以将map看作是由Key标识元素的元素集合,这类容器也被称为“关联容器”,可以通过一个Key值来快速决定一个元素,因此非常适合于需要按照Key值查找元素的容器。 map模版类需要四个模版参数,第一个是键值类型,第二个是元素类型,第三个是比较算子,第四个是分配器类型。其中键值类型和元素类型是必要的。 定义map对象的代码示例: map m; map的基本操作: /* 向map中插入元素 */ m[key] = value; // [key]操作是map很有特色的操作,如果在map中存在键值为key的元素对, 则返回该元素对的值域部分,否则将会创建一个键值为key的元素对,值域为默认值。所以可以用该操作向map中插入元素对或修改已经存在的元素对的值域部分。 m.insert(make_pair(key, value)); // 也可以直接调用insert方法插入元素对,insert操作会返回一个pair,当map中没有与key相匹配的键值时,其first是指向插入元素对的迭代器,其second为true;若map中已经存在与key相等的键值时,其first是指向该元素对的迭代器,second为false。 /* 查找元素 */ int i = m[key]; // 要注意的是,当与该键值相匹配的元素对不存在时,会创建键值为key(当另一个元素是整形时,m[key]=0)的元素对。 map::iterator it = m.find(key); // 如果map中存在与key相匹配的键值时,find操作将返回指向该元素对的迭代器,否则,返回的迭代器等于map的end()(参见vector中提到的begin()和end()操作)。 /* 删除元素 */ m.erase(key); // 删除与指定key键值相匹配的元素对,并返回被删除的元素的个数。 m.erase(it); // 删除由迭代器it所指定的元素对,并返回指向下一个元素对的迭代器。 /* 其他操作 */ m.size(); // 返回元素个数 m.empty(); // 判断是否为空 m.clear(); // 清空所有元素","s":"map","u":"/docs/Algorithms/机试技巧与STL","h":"#map","p":214},{"i":258,"t":"头文件:stack stack模版类的定义在stack头文件中。 stack模版类需要两个模版参数,一个是元素类型,另一个是容器类型,但是只有元素类型是必要的,在不指定容器类型时,默认容器的类型为deque。 定义stack对象的示例代码如下: stack s; stack ss; stack的基本操作有: s.push(x); // 入栈 s.pop(); // 出栈 s.top(); // 访问栈顶 s.empty(); // 当栈空时,返回true s.size(); // 访问栈中元素个数","s":"stack","u":"/docs/Algorithms/机试技巧与STL","h":"#stack","p":214},{"i":260,"t":"头文件:queue queue模版类的定义在queue头文件中。 queue与stack相似,queue模版类也需要两个模版参数,一个元素类型,一个容器类型,元素类型时必须的,容器类型时可选的,默认为deque类型。 定义queue对象的示例代码必须如下: queue q; queue qq; queue的基本操作: q.push(x); // 入队列 q.pop(); // 出队列 q.front(); // 访问队首元素 q.back(); // 访问队尾元素 q.empty(); // 判断队列是否为空 q.size(); // 访问队列中的元素个数","s":"queue","u":"/docs/Algorithms/机试技巧与STL","h":"#queue","p":214},{"i":262,"t":"头文件:set set是与集合相关的容器,STL为我们提供了set的实现,在编程题中遇见集合问题直接调用是十分方便的。 定义set对象的示例代码如下: set s; set ss; set的基本操作: s.begin() // 返回指向第一个元素的迭代器 s.clear() // 清除所有元素 s.count() // 返回某个值元素的个数 s.empty() // 如果集合为空,返回true(真) s.end() // 返回指向最后一个元素之后的迭代器,不是最后一个元素 s.equal_range() // 返回集合中与给定值相等的上下限的两个迭代器 s.erase() // 
删除集合中的元素 s.find() // 返回一个指向被查找到元素的迭代器 s.get_allocator() // 返回集合的分配器 s.insert() // 在集合中插入元素 s.lower_bound() // 返回指向大于(或等于)某值的第一个元素的迭代器 s.key_comp() // 返回一个用于元素间值比较的函数 s.max_size() // 返回集合能容纳的元素的最大限值 s.rbegin() // 返回指向集合中最后一个元素的反向迭代器 s.rend() // 返回指向集合中第一个元素的反向迭代器 s.size() // 集合中元素的数目 s.swap() // 交换两个集合变量 s.upper_bound() // 返回大于某个值元素的迭代器 s.value_comp() // 返回一个用于比较元素间的值的函数","s":"set","u":"/docs/Algorithms/机试技巧与STL","h":"#set","p":214},{"i":264,"t":"头文件:set 在set头文件中,还定义了另一个非常实用的模版类multiset(多重集合)。多重集合与集合的区别在于集合中不能存在相同元素,而多重集合中可以存在。 定义multiset对象的示例代码如下: multiset s; multiset ss; multiset和set的基本操作相似,需要注意的是,集合的count()能返回0(无)或者1(有),而多重集合是有多少个返回多少个。","s":"multiset","u":"/docs/Algorithms/机试技巧与STL","h":"#multiset","p":214},{"i":266,"t":"头文件:bitset 在 STLSTL 的头文件中 bitset中定义了模版类 bitsetbitset,用来方便地管理一系列的 bitbit 位的类。bitsetbitset 除了可以访问指定下标的 bitbit 位以外,还可以把它们作为一个整数来进行某些统计。 bitsetbitset 模板类需要一个模版参数,用来明确指定含有多少位。 定义 bitsetbitset 对象的示例代码: const int MAXN = 32; bitset bt; // bt 包括 MAXN 位,下标 0 ~ MAXN - 1,默认初始化为 0 bitset bt1(0xf); // 0xf 表示十六进制数 f,对应二进制 1111,将 bt1 低 4 位初始化为 1 bitset bt2(012); // 012 表示八进制数 12,对应二进制 1010,即将 bt2 低 4 位初始化为 1010 bitset bt3(\"1010\"); // 将 bt3 低 4 位初始化为 1010 bitset bt4(s, pos, n);// 将 01 字符串 s 的 pos 位开始的 n 位初始化 bt4 bitsetbitset 基本操作: bt.any() // bt 中是否存在置为 1 的二进制位? bt.none() // bt 中不存在置为 1 的二进制位吗? bt.count() // bt 中置为 1 的二进制位的个数 bt.size() // bt 中二进制位的个数 bt[pos] // 访问 bt 中在 pos 处的二进制位 bt.test(pos) // bt 中在 pos 处的二进制位是否为 1 bt.set() // 把 bt 中所有二进制位都置为 1 bt.set(pos) // 把 bt 中在 pos 处的二进制位置为 1 bt.reset() // 把 bt 中所有二进制位都置为 0 bt.reset(pos) // 把 bt 中在pos处的二进制位置为0 bt.flip() // 把 bt 中所有二进制位逐位取反 bt.flip(pos) // 把 bt 中在 pos 处的二进制位取反 bt[pos].flip() // 同上 bt.to_ulong() // 用 bt 中同样的二进制位返回一个 unsigned long 值 os << bt // 把 bt 中的位集输出到 os 流","s":"bitset","u":"/docs/Algorithms/机试技巧与STL","h":"#bitset","p":214},{"i":269,"t":"#include #include #include using namespace std; #define MAX(a, b) ((a) > (b) ? 
(a) : (b) ) //定义图的定点 typedef struct Vertex { int id; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1) {} Vertex(int nid) : id(nid) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } //向图中添加边 bool addEdge(int id1, int id2) { if (!(MAX(id1, id2) < vertexs.size())) return false; if (isDAG) { vertexs[id1].connectors.push_back(id2); } else { vertexs[id1].connectors.push_back(id2); vertexs[id2].connectors.push_back(id1); } return true; } //广度优先搜索 vector BFS(int start) { set visited; vector g, rst; g.push_back(start); visited.insert(start); while(g.size() > 0) { int id = g[0]; g.erase(g.begin()); rst.push_back(id); for(int i = 0; i < vertexs[id].connectors.size(); i++) { int id1 = vertexs[id].connectors[i]; if (visited.count(id1) == 0) { g.push_back(id1); visited.insert(id1); } } } return rst; } //深度优先搜索 vector DFS(int start) { set visited; vector g, rst; g.push_back(start); //cout << \"push \" << start << \" \"; visited.insert(start); rst.push_back(start); bool found; while(g.size() > 0) { int id = g[g.size()-1]; found = false; for(int i = 0; i < vertexs[id].connectors.size(); i++) { int id1 = vertexs[id].connectors[i]; if (visited.count(id1) == 0) { g.push_back(id1); rst.push_back(id1); visited.insert(id1); //cout << \"push \" << id1 << \" \"; found = true; break; } } if (!found) { int id2 = g[g.size()-1]; rst.push_back(-1 * id2); //cout << \"pop \" << id2 << \" \"; g.pop_back(); } } //cout << endl; return rst; } } Graph; int main() { Graph g(8, false); g.addEdge(0, 1); g.addEdge(0, 3); g.addEdge(1, 2); g.addEdge(3, 4); g.addEdge(3, 5); g.addEdge(4, 5); g.addEdge(4, 6); g.addEdge(5, 6); g.addEdge(5, 7); g.addEdge(6, 7); vector bv = g.BFS(0); cout << \"宽度优先搜索节点顺序:\"; for(int j = 0; j < bv.size(); j++) cout << bv[j] << \" \"; cout << endl; cout << \"深度优先搜索节点顺序:\"; Graph g1(6, false); g1.addEdge(0, 1); g1.addEdge(0, 4); g1.addEdge(0, 5); g1.addEdge(1, 5); g1.addEdge(4, 5); g1.addEdge(5, 2); g1.addEdge(5, 3); g1.addEdge(2, 3); vector route = g1.DFS(0); for(int i = 0; i < route.size(); i++) cout << route[i] << \" \"; cout << endl; char ch; cin >> ch; return 0; }","s":"不带出入度的最简模板","u":"/docs/Algorithms/机试技巧与STL","h":"#不带出入度的最简模板","p":214},{"i":271,"t":"#include #include #include #include #define MAX(a, b) ((a) > (b) ? 
(a) : (b) ) using namespace std; int n,m; vector inDegreelist,outDegreelist; //定义图的定点 typedef struct Vertex { int id,inDegree,outDegree; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1),inDegree(0),outDegree(0) {} Vertex(int nid) : id(nid),inDegree(0),outDegree(0) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } Graph() : nVertexs(1), isDAG(1) { vertexs.resize(1); } //向图中添加边 bool addEdge(int id1, int id2) { if (!(MAX(id1, id2) < vertexs.size())) return false; if (isDAG) { vertexs[id1].connectors.push_back(id2); vertexs[id1].outDegree++; vertexs[id2].inDegree++; } else { vertexs[id1].connectors.push_back(id2); vertexs[id2].connectors.push_back(id1); vertexs[id1].outDegree++; vertexs[id1].inDegree++; vertexs[id2].outDegree++; vertexs[id2].inDegree++; } return true; } } Graph; Graph g; void init(){ cin>>n>>m; g=Graph(n, true); int src,dst; while(m--){ cin>>src>>dst; g.addEdge(src,dst); } vector::iterator it = g.vertexs.begin(); while(it!=g.vertexs.end()){ inDegreelist.push_back(it->inDegree); outDegreelist.push_back(it->outDegree); it++; } } int countin(int n){ return count(inDegreelist.begin(),inDegreelist.end(),n); } int countout(int n){ return count(outDegreelist.begin(),outDegreelist.end(),n); } bool Is_List(){ //有一个inDegree为0的头和一个outDegree为0的尾,且其余节点入度与出度都为1; return (countin(0)==1)&&(countout(0)==1)&&(countin(1)==n-1)&&(countout(1)==n-1); } bool Is_Tree(){ //有一个inDegree为0的头且其余节点inDegree均为1,且不是链表; return (countin(0)==1)&&(countin(1)==n-1); } bool topologicalSort(){//拓扑排序判断有环无环 int num=0;//记录加入拓扑排序的顶点数 queue q; for(int i=0;i #include #ifndef BASE #define BASE #define TRUE 1 #define FALSE 0 #define OK 1 #define ERROR 0 #define INFEASIBLE -1 #define OVERFLOW -2 typedef int Status; typedef int bool; #endif #define VertexType char //点类型 #define VRType int //边类型 #define maxSize 100 void Visit(VertexType e) { printf(\"%c\", e); } #define MAX_VERTEX_NUM 20 typedef enum{DG, UDG} GraphKind; typedef struct ArcNode{ int adjV; //边指向的顶点 VRType weight; //权重 struct ArcNode *next; }ArcNode; //边 typedef struct VNode{ VertexType data; ArcNode *firstarc; }VNode, AdjList[MAX_VERTEX_NUM]; //顶点 typedef struct{ GraphKind kind; int vernum,arcnum; AdjList vers; }ALGraph; /*------------------------ |7.14 创建有向图的邻接表| ------------------------*/ Status InitGraph_AL(ALGraph *pG) { //初始化 int i; pG->arcnum = 0; pG->vernum = 0; for (i=0; ivers[i].firstarc = NULL; //VC++6.0中指针初始化为0xcccccccc return OK; } int LocateVex_AL(ALGraph G, VertexType e) { //定位值为e的元素下标 int i; for (i=0; i弧的数目->各顶点的信息->各条弧的信息 int i,a,b; char tmp[MAX_VERTEX_NUM]; char h,t; ArcNode *p, *q; InitGraph_AL(pG); //VC++6.0中指针初始化为0xcccccccc,如果不将指针初始化为NULL,会出错 //图的类型 pG->kind = DG; //顶点数目 scanf(\"%d\", &i); if (i<0) return ERROR; pG->vernum = i; //弧的数目 scanf(\"%d\", &i); if (i<0) return ERROR; pG->arcnum = i; //各顶点信息 scanf(\"%s\", tmp); for (i=0; ivernum; ++i) pG->vers[i].data=tmp[i]; //弧的信息 for (i=0; iarcnum; ++i) { scanf(\"%s\", tmp); h = tmp[0]; t = tmp[2]; a = LocateVex_AL(*pG, h); b = LocateVex_AL(*pG, t); if (a<0 || b<0) return ERROR; p = (ArcNode *)malloc(sizeof(ArcNode)); if (!p) exit(OVERFLOW); p->adjV=b;p->next=NULL; if (pG->vers[a].firstarc) { //已经有边了 for (q = pG->vers[a].firstarc; q->next; q=q->next) ; //找到最后一条 q->next = p; } else { //第一条边 pG->vers[a].firstarc = p; } } return OK; } /*---------------------------------------------------------------- |7.28 有向图-从u-v的所有简单路径 | 
----------------------------------------------------------------*/ int visit[MAX_VERTEX_NUM]; //前面定义了 VertexType paths[maxSize][MAX_VERTEX_NUM]; //存放路径 int path[MAX_VERTEX_NUM]; //路径 int pathnum=0; //当前是第几条路径 void FindAllPath(ALGraph G, int u,int v,int k) { //u->v当前是第k个位置 int i; ArcNode *p; visit[u]=1; //走到了u path[k]=u; //添加到路径->下标位置为k的结点是u(第k+1个是u) if (u==v) { //找到了 for (i=0; i<=k; i++) {//复制到paths paths[pathnum][i] = G.vers[path[i]].data; } paths[pathnum][i]='\\0'; //结束符 pathnum++; //找下一条路径 } else { //u的邻边开始找 for (p=G.vers[u].firstarc; p; p=p->next) { if (visit[p->adjV]==0) FindAllPath(G, p->adjV, v, k+1); //去这个邻接点找 } } // 回溯到上一个结点 // 注意:回溯应该写在外面-->也就是不管有没有找到都要回溯 visit[u]=0; path[k]=0; } int main() { /*7.28 6 11 ABCDEF B,A B,D C,B C,F D,C D,E D,F E,A F,A F,B F,E B->A A->B D->A */ int i,j; int cnt; ALGraph G; char tmp[20]; CreateDG_AL(&G); while (1) { scanf(\"%s\", tmp); //A->B i = LocateVex_AL(G, tmp[0]); j = LocateVex_AL(G, tmp[3]); for (cnt=0; cnt #include #define mem(a,b) memset(a,b,sizeof a); using namespace std; typedef long long ll; const int maxn=50; int mid[maxn],po[maxn],pr[maxn]; int first; struct node { int l,r; }T[maxn]; // 中序+先序=>二叉树 int mid_pr_build(int la,int ra,int lb,int rb) // la,ra:表示中序遍历 lb,rb:表示先序遍历 { // 这里不能等于,因为假设:len==1,则la==ra,直接返回,但是实际上是有一个 rt 的,却没被建立 if(la>ra) return 0; int rt=pr[lb]; // 因为先序遍历第一个是根节点 int p1=la,p2; while(mid[p1]!=rt) p1++; // 在中序遍历中找到根节点 p2=p1-la; T[rt].l=mid_pr_build(la,p1-1,lb+1,lb+p2); // 左子树(锁定左子树范围的下标) T[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb); // 右子树(锁定右子树范围的下标) return rt; } // 中序+后序=>二叉树 int mid_po_build(int la,int ra,int lb,int rb) // la,ra:表示中序遍历 lb,rb:表示后序遍历 { if(la>ra) return 0; int rt=po[rb]; // 因为后序遍历最后一个是根节点 int p1=la,p2; while(mid[p1]!=rt) p1++; // 在中序遍历中找到根节点 p2=p1-la; T[rt].l=mid_po_build(la,p1-1,lb,lb+p2-1); // 左子树(锁定左子树范围的下标) T[rt].r=mid_po_build(p1+1,ra,lb+p2,rb-1); // 右子树(锁定右子树范围的下标) return rt; } // 求树高 int getHeight(int rt) { if(rt==0) return 0; return 1+max(getHeight(T[rt].l),getHeight(T[rt].r)); } // 层序遍历 void bfs(int rt) { queue q; vector v; q.push(rt); while(!q.empty()) { int w=q.front(); q.pop(); v.push_back(w); if(T[w].l!=0) q.push(T[w].l); if(T[w].r!=0) q.push(T[w].r); } int len=v.size(); for(int i=0;i #include #define mem(a,b) memset(a,b,sizeof a); using namespace std; typedef long long ll; const int maxn=50; int mid[maxn],po[maxn],pr[maxn]; int first; struct node { int l,r; }T[maxn]; int mid_pr_build(int la,int ra,int lb,int rb) { if(la>ra) return 0; int rt=pr[lb]; int p1=la,p2; while(mid[p1]!=rt) p1++; p2=p1-la; T[rt].l=mid_pr_build(la,p1-1,lb+1,lb+p2); T[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb); return rt; } int mid_po_build(int la,int ra,int lb,int rb) { if(la>ra) return 0; int rt=po[rb]; int p1=la,p2; while(mid[p1]!=rt) p1++; p2=p1-la; T[rt].l=mid_po_build(la,p1-1,lb,lb+p2-1); T[rt].r=mid_po_build(p1+1,ra,lb+p2,rb-1); return rt; } int getHeight(int rt) { if(rt==0) return 0; return 1+max(getHeight(T[rt].l),getHeight(T[rt].r)); } void bfs(int rt) { queue q; vector v; q.push(rt); while(!q.empty()) { int w=q.front(); q.pop(); v.push_back(w); if(T[w].l!=0) q.push(T[w].l); if(T[w].r!=0) q.push(T[w].r); } int len=v.size(); for(int i=0;i #include #define mem(a,b) memset(a,b,sizeof a) #define ssclr(ss) ss.clear(), ss.str(\"\") #define INF 0x3f3f3f3f #define MOD 1000000007 using namespace std; typedef long long ll; const int maxn=5e4+1000; int f; int pre[maxn], in[maxn]; struct node { int l,r,d; }T[maxn]; int create(int l1,int r1,int l2,int r2) // in pre { if(l2>r2) return -1; int rt=l2; int p1=l1,p2; 
while(in[p1]!=pre[rt]) p1++; p2=p1-l1; T[rt].d=pre[rt]; T[rt].l=create(l1,p1-1,l2+1,l2+p2); T[rt].r=create(p1+1,r1,l2+p2+1,r2); return rt; } void postT(int rt) { if(rt==-1 || !f) return; postT(T[rt].l); postT(T[rt].r); if(f) f=0, printf(\"%d\\n\",T[rt].d); } int main() { int n; scanf(\"%d\",&n); for(int i=0;i a(10,3); // 定义一个长度为10的vector,初始化为3; a.size(); // vector的size,所有容器都有 a.empty(); // 范围vector是否为空,所有容器都有 a.clear(); // 清空 a.front(); // 第一个数 a.back(); // 最后一个数 a.push_back(); // 在最后插入一个数 a.pop_back(); // 删除最后一个数 // vector支持比较运算 vector a(4,3),b(3,4); if(a > b) cout << \"Yes\"; else cout << \"No\"","s":"vector","u":"/docs/Algorithms/STL模板","h":"#vector","p":288},{"i":293,"t":"pair a; a = {20,\"abc\"}; a.first(); // 获取第一个元素 a.second(); // 获取第二个元素 // pair也能进行sort","s":"pair","u":"/docs/Algorithms/STL模板","h":"#pair","p":288},{"i":295,"t":"string a = \"Acwing\"; a.size(); // 获取string的大小 a.empty(); // 判断是否为空 a.clear(); // 清空 a += \"def\"; cout << a. substr(1,2) << endl; // 第一个参数起始位置,第二个参数是字符串长度","s":"string","u":"/docs/Algorithms/STL模板","h":"#string","p":288},{"i":297,"t":"query a; a.size(); a.empty(); a.push(1); // 队尾插入元素 a.front(); // 返回队头元素 a.back(); // 返回队尾元素 a.pop(); // 删除队头元素","s":"query","u":"/docs/Algorithms/STL模板","h":"#query","p":288},{"i":299,"t":"// 默认是大根堆 priority_queue heap; heap.clear(); heap.size(); heap.empty(); // 如何定义一个小根堆: 1. 插入负数 2. 直接定义 heap.push(-x); // 黑科技方法 priority_queue,greater> q;","s":"priority_queue","u":"/docs/Algorithms/STL模板","h":"#priority_queue","p":288},{"i":301,"t":"stack s; s.size(); s.empty(); s.push(); s.top(); s.pop();","s":"stack","u":"/docs/Algorithms/STL模板","h":"#stack","p":288},{"i":303,"t":"deque a; a.size(); a.empty(); a.clear(); a.front(); a.back(); a.push_back(); a.pop_back();","s":"deque","u":"/docs/Algorithms/STL模板","h":"#deque","p":288},{"i":305,"t":"set s; // 不能有重复元素 // s.begin()/end() multiset MS; // 可以有重复元素 s.insert(1); 插入一个数 s.size(); s.empty(); s.clear(); s.find(1); // 查找一个元素,如果不存在的话返回end迭代器 s.erase(1); // 输入是一个数x,输出所有x (2)输入一个迭代器,删除这个迭代器 // set 最核心的操作 s.lower_bound(); // 范围大于等于x的最小的数 s.upper_bound(); // 返回大于x的最小的数","s":"set/multiset","u":"/docs/Algorithms/STL模板","h":"#setmultiset","p":288},{"i":307,"t":"#include // 和python里面的字典非常的相似 map a; a[\"2\"] = 3; a.insert({\"1\",1}); a.erase({\"1\",1}); a.find({\"1\",1}); unordered_set, unordered_map, unordered_multiset, unordered_multimap的操作和set或者map等的操作基本一致,唯一的区别就是不支持类似lower_bound()这样的操作 (哈希表的内部是无序的)","s":"map/multimap","u":"/docs/Algorithms/STL模板","h":"#mapmultimap","p":288},{"i":309,"t":"可以省下来8位的空间 bitset<10000> s; // 支持所有的基本操作: // 移位操作:<< >> // == != // count() 返回有多少个1 // any() 判断是否至少有一个1 // none() 判断是否全为0 // set(),把所有为置为1 // set(k,v), 将第k个变为v // reset(), 把所有位变成0 // flip(), 把所有位取反","s":"biset","u":"/docs/Algorithms/STL模板","h":"#biset","p":288},{"i":312,"t":"卷积层会对输入的局部区域进行卷积操作,因此对于输入图像中的每个位置都会产生一个响应。然而,在某些情况下,我们并不关心输入图像中每个位置的细节,而只是想获取该区域的一些重要特征。 假设我们想分类一张猫的图片,那么我们可能只需要提取出它的眼睛、鼻子、嘴巴和耳朵等特征,而不必考虑这些特征在图像中的精确位置。","s":"一、卷积对像素位置信息是敏感的","u":"/docs/Deep Learning/基础知识/池化层","h":"#一卷积对像素位置信息是敏感的","p":310},{"i":314,"t":"池化层通过对输入的局部区域进行降采样操作,减少了特征图的大小,从而使得模型对于输入位置的微小变化更加鲁棒。例如,如果我们将一个对象稍微平移一点,它依然可以被正确地识别,因为池化层可以保留输入图像的关键特征,而忽略掉微小的位置变化。 但是需要注意的是,当池化的步幅和池化区域的大小过大时,会导致模型丢失较多的细节信息,从而影响模型性能。因此,在实际应用中,需要根据具体任务来选择适当的池化参数。 缓解卷积层对位置的敏感性,提高鲁棒:池化操作通常用于卷积层之后,使模型对于输入位置的微小变化更加鲁棒,减少图像中的噪声和冗余信息 减小特征图大小:池化操作会通过在特定位置上合并特征值来缩小输入特征图的空间大小,降低计算开销。 减少参数数量:池化操作减小了特征图的空间大小,从而也减小了需要训练的权重参数数量,更容易训练和优化。","s":"二、池化层的作用","u":"/docs/Deep 
Learning/基础知识/池化层","h":"#二池化层的作用","p":310},{"i":316,"t":"池化层将输入特征图分割成若干个区域,然后对每个区域进行汇聚操作,将该区域内的特征值合并成一个值。这个操作可以使用不同的方法实现,如最大值池化、平均值池化等。 最常见的是最大值池化,其中每个区域的输出值是该区域内特征值的最大值,这样可以保留图像中最显著的特征,同时减少噪声和冗余信息的影响。","s":"三、池化的实现","u":"/docs/Deep Learning/基础知识/池化层","h":"#三池化的实现","p":310},{"i":319,"t":"kh=kw=1k_h=k_w=1kh​=kw​=1的卷积不识别空间模式,丢弃了空间信息,只是融合通道 相当于输入形状为HW×ciHW \\times c_iHW×ci​,权重形状为co×cic_o \\times c_ico​×ci​的全连接层","s":"一、1x1卷积","u":"/docs/Deep Learning/基础知识/卷积层","h":"#一1x1卷积","p":317},{"i":321,"t":"输入:ci×H×Wc_i \\times H \\times Wci​×H×W 核:co×ci×kh×kwc_o \\times c_i \\times k_h \\times k_wco​×ci​×kh​×kw​ 偏差:co×cic_o \\times c_ico​×ci​ 输出:co×H′×W′c_o \\times H' \\times W'co​×H′×W′ 输出H′以及W′H'以及W'H′以及W′的计算: shapeoutput=shapeinput−sizekernel+2∗paddingstride+1(1)shape_{output} = \\frac{shape_{input}-size_{kernel}+2*padding}{stride}+1 \\tag{1}shapeoutput​=strideshapeinput​−sizekernel​+2∗padding​+1(1) 计算复杂度:O(co×ci×H×W×H′×W′)O(c_o \\times c_i \\times H \\times W \\times H' \\times W')O(co​×ci​×H×W×H′×W′) 总结: 输出的通道数是卷积层的超参数 每个输入通道有独立的二维卷积核,所有通道结果相加得到一个输出结果 每个输出通道有独立的三维卷积核","s":"二、二维卷积层","u":"/docs/Deep Learning/基础知识/卷积层","h":"#二二维卷积层","p":317},{"i":325,"t":"σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ (1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma \\space (1 - \\sigma) \\tag{2}dxdσ​=σ (1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了","s":"1. Sigmoid函数 / Logistic函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-sigmoid函数--logistic函数","p":322},{"i":327,"t":"f(x)={0x<0xx≥0(3)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{3}f(x)={0x​x<0x≥0​(3) df(x)dx={0x<01x≥0(4)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{4}dxdf(x)​={01​x<0x≥0​(4) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x)","s":"2. 线性整流单元(Rectified Linear Unit, ReLU)","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-线性整流单元rectified-linear-unit-relu","p":322},{"i":330,"t":"L2范数是对元素求平方和后再开根号,需要.pow(2)后才可作为损失函数 微小的误差可能对网络性能带来极大的影响 LossMSE=∑[y−f(x)]2(5)Loss_{MSE} = \\sum{[{y - f(x)]^2}} \\tag{5}LossMSE​=∑[y−f(x)]2(5) ∥y−f(x)∥2=∑[y−f(x)]22(6)\\Vert y - f(x) \\Vert_2 = \\sqrt[2]{\\sum{[y - f(x)]^2}} \\tag{6}∥y−f(x)∥2​=2∑[y−f(x)]2​(6)","s":"1. Mean Squared Error 均方误差","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-mean-squared-error-均方误差","p":322},{"i":332,"t":"信息熵​ Cross Entropy中的Entropy指的是信息熵,可以理解为不确定性。衡量一个概率分布本身的不确定程度。 It's a measure of surprise, higher entrpoy means less information and higher uncertainty. 假设一个离散型随机变量XXX的可能取值为X=x1,x2,...,xnX=x_1,x_2,...,x_nX=x1​,x2​,...,xn​,而取值事件xix_ixi​发生的概率为PiP_iPi​,则其信息熵的定义为 H(P)=−∑inPi log2(Pi)=∑inPi log2(1Pi)(7)\\begin{align} H(P) &= -\\sum_i^n{P_i}\\space{log_2(P_i)} \\\\ &= \\sum_i^n{P_i}\\space{log_2({\\frac{1}{P_i}}}) \\tag{7} \\end{align}H(P)​=−i∑n​Pi​ log2​(Pi​)=i∑n​Pi​ log2​(Pi​1​)​(7) KL散度​ 在概率论或信息论中,KL散度( Kullback–Leibler Divergence),又称相对熵(relative entropy),是描述两个概率分布P和Q差异的一种方法。 存在两个概率分布P和Q,其离散型随机变量XXX的可能取值为X=x1,x2,...,xnX=x_1,x_2,...,x_nX=x1​,x2​,...,xn​,而取值事件xix_ixi​发生的概率分别为Pi,QiP_i,Q_iPi​,Qi​. 
KL散度是非对称的,即 DKL(P ∣∣ Q)≠DKL(Q ∣∣ P)(8)D_{KL}(P \\space || \\space Q) \\neq D_{KL}(Q \\space || \\space P)\\tag{8}DKL​(P ∣∣ Q)=DKL​(Q ∣∣ P)(8) DKL(P ∣∣ Q)=∑Pi [log2(Pi)−log2(Qi)](9)D_{KL}(P \\space || \\space Q) = \\sum{P_i\\space [log_2(P_i)-log_2(Q_i)]}\\tag{9}DKL​(P ∣∣ Q)=∑Pi​ [log2​(Pi​)−log2​(Qi​)](9) 特别的,DKL(PLabel ∣ QPred)D_{KL}(P_{Label} \\space | \\space Q_{Pred})DKL​(PLabel​ ∣ QPred​)表示当用概率分布Q来拟合真实分布P时,产生的信息损耗,其中P表示真实分布,Q表示P的拟合分布。 交叉熵​ 衡量两个概率分布P和Q之间的不确定性程度。交叉熵的数学表达为 H(P, Q)=H(P)+DKL(P ∣∣ Q)=−∑Pi log2(Qi)(10)\\begin{align} H(P, \\space Q) &= H(P) + D_{KL}(P\\space || \\space Q) \\\\ &= - \\sum{P_i}\\space{log_2({Q_i})} \\tag{10} \\end{align}H(P, Q)​=H(P)+DKL​(P ∣∣ Q)=−∑Pi​ log2​(Qi​)​(10) PyTorch中的CrossEntropyLoss​ torch.nn.CrossEntropyLoss相当于torch.softmax + torch.log + torch.nn.nllloss. import torch.nn as nn # 使用NLLLoss实现 nllloss = nn.NLLLoss() predict = torch.Tensor([[2, 3, 1], [3, 7, 9]]) predict = torch.log(torch.softmax(predict, dim=-1)) label = torch.tensor([1, 2]) nllloss(predict, label) # output: tensor(0.2684) # 使用CrossEntropyLoss实现 cross_loss = nn.CrossEntropyLoss() predict = torch.Tensor([[2, 3, 1], [3, 7, 9]]) label = torch.tensor([1, 2]) cross_loss(predict, label) # output: tensor(0.2684)","s":"2. Cross Entropy Loss 交叉熵损失","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-cross-entropy-loss-交叉熵损失","p":322},{"i":334,"t":"提示 这篇文章主要介绍L1和L2正则化是如何在梯度下降中工作的; 越往后学越意识到基础知识的重要性,这些基础知识可能在你前期理解的时候会比较费劲,但是当你真正的想要去对神经网络进行设计的时候就会体会到他们的重要性。 原文链接:https://towardsdatascience.com/intuitions-on-l1-and-l2-regularisation-235f2db4c261 过拟合是当前机器学习或统计模型针对特定数据集而无法推广到其他数据集时发生的现象。这通常发生在复杂的模型中,比如深度神经网络。 正则化是引入其他信息以防止过度拟合的过程。本文的重点是L1和L2正则化。 有很多解释,但老实说,它们有点太抽象了,我可能会忘记它们,最后访问这些页面,只是再次忘记。在本文中,我将通过梯度下降来解释为什么L1和L2起作用。梯度下降只是一种通过使用梯度值的迭代更新来找到 “正确” 系数的方法。(本文展示了如何在简单的线性回归中使用梯度下降。)","s":"对于正则化的理解","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"","p":333},{"i":336,"t":"L1和L2正则化分别归因于向量w的L1和L2范数。下面有关范数的基础知识: 1-norm (also known as L1 norm): ∥w∥1=∣w1∣+∣w2∣+…+∣wN∣\\|\\mathbf{w}\\|_{1}=\\left|w_{1}\\right|+\\left|w_{2}\\right|+\\ldots+\\left|w_{N}\\right|∥w∥1​=∣w1​∣+∣w2​∣+…+∣wN​∣ 2-norm (also known as L2 norm or Euclidean norm): ∥w∥2=(∣w1∣2+∣w2∣2+…+∣wN∣2)12\\|\\mathbf{w}\\|_{2}=\\left(\\left|w_{1}\\right|^{2}+\\left|w_{2}\\right|^{2}+\\ldots+\\left|w_{N}\\right|^{2}\\right)^{\\frac{1}{2}}∥w∥2​=(∣w1​∣2+∣w2​∣2+…+∣wN​∣2)21​ p-norm ∥w∥p=(∣w1∣p+∣w2∣p+…+∣wN∣p)1p\\|\\mathbf{w}\\|_{p}=\\left(\\left|w_{1}\\right|^{p}+\\left|w_{2}\\right|^{p}+\\ldots+\\left|w_{N}\\right|^{p}\\right)^{\\frac{1}{p}}∥w∥p​=(∣w1​∣p+∣w2​∣p+…+∣wN​∣p)p1​ 实现L1范数进行正则化的线性回归模型称为lasso regression,实现 (平方) L2范数进行正则化的线性回归模型称为岭回归。线性回归的模型对于这两种范数的实现是一样的。 y^=w1x1+w2x2+…+wNxN+b\\hat{y}=w_{1} x_{1}+w_{2} x_{2}+\\ldots+w_{N} x_{N}+by^​=w1​x1​+w2​x2​+…+wN​xN​+b 但是在计算中损失函数包含这些正则项: 备注 严格来说,最后一个方程 (岭回归) 是权重平方为L2范数的损失函数 (注意没有平方根)。 正则化项是 “约束”,在最小化损失函数时,优化算法必须通过该约束来 “坚持”。","s":"L1和L2是什么?","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1和l2是什么","p":333},{"i":338,"t":"让我们定义一个模型,看看L1和L2是如何工作的。为简单起见,我们用一个自变量定义了一个简单的线性回归模型。 y^=wx+b\\hat{y}=w x+by^​=wx+b 在这里,我使用了深度学习中的约定w ('weight ') 和b ('bias')。 在实际应用中,简单的线性回归模型不容易出现过拟合。如引言中所述,深度学习模型由于其模型复杂性而更容易出现此类问题。 因此,请注意,本文中使用的表达式很容易扩展到更复杂的模型,而不仅限于线性回归。","s":"Model","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#model","p":333},{"i":340,"t":"为了证明L1和L2正则化的效果,让我们使用3种不同的损失函数/目标拟合我们的线性回归模型。 我们的目标是尽量减少这些不同的损失。 无正则化的损失函数​ 我们将损失函数L定义为平方误差,其中误差是y (真实值) 和 (预测值) 之间的差。 L=(y^−y)2=(wx+b−y)2\\begin{aligned} L &=(\\hat{y}-y)^{2} \\\\ &=(w x+b-y)^{2} \\end{aligned}L​=(y^​−y)2=(wx+b−y)2​ 让我们假设我们的模型将使用此损失函数进行过拟合。 具有L1正则化的损失函数​ 根据上述损失函数,在其上添加一个L1正则化项如下所示: 
L1=(wx+b−y)2+λ∣w∣L_{1}=(w x+b-y)^{2}+\\lambda|w|L1​=(wx+b−y)2+λ∣w∣ 其中正则化参数 λ> 0是手动调节的。我们把这个损失函数叫做L1。请注意,除了w = 0时,| w | 在任何地方都是可微的,如下所示。我们以后会需要这个。 d∣w∣dw={1w>0−1w<0\\frac{d|w|}{d w}=\\left\\{\\begin{array}{ll} 1 & w>0 \\\\ -1 & w<0 \\end{array}\\right.dwd∣w∣​={1−1​w>0w<0​ 具有L2正则化的损失函数​ 同样,将L2正则化项添加到L看起来是这样的: L2=(wx+b−y)2+λw2L_{2}=(w x+b-y)^{2}+\\lambda w^{2}L2​=(wx+b−y)2+λw2 同样,λ> 0。 梯度下降​ 现在,让我们根据上面定义的3个损失函数,使用梯度下降优化来求解线性回归模型。回想一下,更新梯度下降中的参数w如下: wnew =w−η∂L∂w\\begin{aligned}w_{\\text {new }}=w-\\eta \\frac{\\partial L}{\\partial w}\\end{aligned}wnew ​=w−η∂w∂L​​ x 1import torch.nn as nn2​3# 使用NLLLoss实现4nllloss = nn.NLLLoss()5predict = torch.Tensor([[2, 3, 1], [3, 7, 9]])6predict = torch.log(torch.softmax(predict, dim=-1))7label = torch.tensor([1, 2])8nllloss(predict, label)9# output: tensor(0.2684)10​11# 使用CrossEntropyLoss实现12cross_loss = nn.CrossEntropyLoss()13predict = torch.Tensor([[2, 3, 1], [3, 7, 9]])14label = torch.tensor([1, 2])15cross_loss(predict, label)16# output: tensor(0.2684)python L:L:L: wnew =w−η∂L∂w=w−η⋅[2x(wx+b−y)]\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L}{\\partial w} \\\\ &=w-\\eta \\cdot[2 x(w x+b-y)] \\end{aligned}wnew ​​=w−η∂w∂L​=w−η⋅[2x(wx+b−y)]​ L1:L1:L1: wnew =w−η∂L1∂w=w−η⋅[2x(wx+b−y)+λd∣w∣dw]={w−η⋅[2x(wx+b−y)+λ]w>0w−η⋅[2x(wx+b−y)−λ]w<0\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L_{1}}{\\partial w} \\\\ &=w-\\eta \\cdot\\left[2 x(w x+b-y)+\\lambda \\frac{d|w|}{d w}\\right] \\\\ &=\\left\\{\\begin{aligned} w-\\eta \\cdot[2 x(w x+b-y)+\\lambda] & w>0 \\\\ w-\\eta \\cdot[2 x(w x+b-y)-\\lambda] & w<0 \\end{aligned}\\right. \\end{aligned}wnew ​​=w−η∂w∂L1​​=w−η⋅[2x(wx+b−y)+λdwd∣w∣​]={w−η⋅[2x(wx+b−y)+λ]w−η⋅[2x(wx+b−y)−λ]​w>0w<0​​ L2:L2:L2: wnew =w−η∂L2∂w=w−η⋅[2x(wx+b−y)+2λw]\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L_{2}}{\\partial w} \\\\ &=w-\\eta \\cdot[2 x(w x+b-y)+2 \\lambda w] \\end{aligned}wnew ​​=w−η∂w∂L2​​=w−η⋅[2x(wx+b−y)+2λw]​","s":"损失函数","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#损失函数","p":333},{"i":342,"t":"从这里开始,让我们对上面的方程式进行以下替换 (以获得更好的可读性): η=1\\eta=1η=1 H=2x(wx+b−y)H=2 x(w x+b-y)H=2x(wx+b−y) 接着我们就可以得到: L:L:L: wnew =w−Hw_{\\text {new }}=w-Hwnew ​=w−H L1: wnew ={(w−H)−λ,w>0(w−H)+λ,w<0w_{\\text {new }}=\\left\\{\\begin{array}{ll} (w-H)-\\lambda, & w>0 \\\\ (w-H)+\\lambda, & w<0 \\end{array}\\right.wnew ​={(w−H)−λ,(w−H)+λ,​w>0w<0​ L2:L2:L2: wnew =(w−H)−2λww_{\\text {new }}=(w-H)-2 \\lambda wwnew ​=(w−H)−2λw","s":"如何避免过拟合","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#如何避免过拟合","p":333},{"i":344,"t":"观察有正则化参数 λ 和没有正则化参数 λ 的权重更新之间的差异。这里有一些地方可以很直观的看出。 Intuition A: 假设用等式0,计算w-H给我们一个w值,导致过拟合。然后,直觉上,公式将减少过拟合的机会,因为引入 λ 使我们远离了前面说过的由于w导致的过拟合问题。 Intuition B: 一个过度拟合的模型意味着我们有一个非常适合我们模型的w值。“完美” 的意思是,如果我们将数据 (x) 替换回模型中,我们的预测将非常非常接近真实的y。当然,这很好,但是我们不想要完美。为什么?因为这意味着我们的模型仅适用于我们训练的数据集。这意味着我们的模型将产生与其他数据集的真实值相去甚远的预测。因此,我们满足于不那么完美,希望我们的模型也能与其他数据进行接近的预测。为此,我们用惩罚项 λ 在等式0中 “taint” 这个完美的w。就如公式15和16所示。 Intution C: 请注意,H 取决于模型 (w和b) 和数据 (x和y)。仅根据公式中的模型和数据更新权重会导致过拟合,从而导致模型泛化性不好。另一方面,在等式15,16中,w的最终值不仅受模型和数据的影响,而且还受与模型和数据无关的预定义参数 λ 的影响。因此,尽管值过大会导致模型严重欠拟合,如果我们设置适当的 λ 值就可以防止过拟合。 Intution D: 不同潜在训练集的权重会更相似——这意味着模型的方差减少了(相反,如果我们每次随机移动权重只是为了摆脱过度拟合的解决方案,方差不会改变)。 我们将为每个功能提供更小的权重。为什么这会减少过度拟合?我觉得很容易思考的方式是,在典型情况下,我们将有少量简单的特征,这些特征将解释大部分方差 (例如,y的大部分将由y_hat = ax+b解释); 但是如果我们的模型没有正则化,我们可以添加我们想要的更多功能来解释数据集的残差方差 (例如y_at = ax+bx ²+ cx ³ + e),这自然会使得模型过度拟合训练。引入权重之和的惩罚意味着模型必须最佳地 “分配” 其权重,因此自然地,该 “资源” 的大部分将用于解释大部分方差的简单特征,而复杂特征的权重很小或为零。","s":"有正则化与没有正则化","u":"/docs/Deep 
Learning/基础知识/对于正则化的理解","h":"#有正则化与没有正则化","p":333},{"i":346,"t":"比较上面每个等式的第二项。除H外,w的变化取决于 ± λ 项或-2λw项,这突出了以下内容的影响: sign of current w (L1, L2) magnitude of current w (L2) doubling of the regularisation parameter (L2) 虽然使用L1的权重更新会受到第一点的影响,但来自L2的权重更新受所有这三个点的影响。虽然我只是根据迭代方程更新进行了比较,但请注意,这并不意味着一个比另一个 “更好”。 现在,让我们在下面看看如何仅通过当前w的符号就可以实现L1的正则化效应。","s":"L1 vs L2","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1-vs-l2","p":333},{"i":348,"t":"看看方程3.1中的L1。如果w为正,则正则化参数 λ>0将通过从w中减去 λ 来让w更小。相反,在等式3.2中,如果w为负,则 λ 将被加到w上,从而使其较少为负。因此,这具有将w推向0的效果。 这在1元线性回归模型中当然毫无意义,但其具有在多元回归模型中 “去除” 无用变量的能力。你也可以认为L1完全减少了模型中的特征数量。以下是L1试图在多元线性回归模型中 “推” 一些变量的示例: y^=0.4561x1−0.0007x2+0.3251x3+0.0009x4+0.0001x5−0.9142x6−0.553\\hat{y}=0.4561 x_{1}-0.0007 x_{2}+0.3251 x_{3}+0.0009 x_{4}+0.0001 x_{5}-0.9142 x_{6}-0.553y^​=0.4561x1​−0.0007x2​+0.3251x3​+0.0009x4​+0.0001x5​−0.9142x6​−0.553 那么,将w推向0如何有助于L1正则化中的过拟合?如上所述,随着w变为0,我们正在通过降低变量的重要性来减少功能的数量。在上面的方程式中,我们看到x_2,x_4和x_5由于系数小而几乎 “无用”,因此我们可以将它们从方程式中删除。这反过来降低了模型的复杂性,使我们的模型更简单。更简单的模型可以减少过拟合的机会。 Note: 虽然L1具有将权重推向0的影响,而L2没有,但这并不意味着由于L2的权重不能达到或者接近0。","s":"L1的稀疏性","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1的稀疏性","p":333},{"i":350,"t":"提示 输入:shape为[5,5,3][5, 5, 3][5,5,3]的图像 输出要求:shape为[5,5,4][5, 5, 4][5,5,4]的feature map 使用3×33 \\times 33×3卷积核,padding=1,stride=1padding=1, stride=1padding=1,stride=1","s":"深度可分离卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"","p":349},{"i":352,"t":"卷积层共4个filter(输出通道为4),每个filter3个kernel(输入通道为3) 其中,每个filter都对输入图像的所有通道完成一次卷积,filter中的kernel分别对输入的通道进行具体卷积运算 不考虑卷积偏置,参数量为 3×3×3×4=108(1)3 \\times 3 \\times 3 \\times 4 = 108 \\tag{1}3×3×3×4=108(1)","s":"常规卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#常规卷积","p":349},{"i":354,"t":"使用1个filter,其中包含3个kernel。每个kernel分别对输入图像的3个通道单独进行卷积,参数量为 3×3××3=27(2)3 \\times 3 \\times \\times 3 = 27 \\tag{2}3×3××3=27(2) 代码实现也较为简单,只需令Conv2d的输出通道与输入通道相同即可","s":"(1)逐通道卷积-Depthwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#1逐通道卷积-depthwise-convolution","p":349},{"i":356,"t":"使用1×11 \\times 11×1卷积核,每个filter对上一步的feature map在深度方向进行一次加权组合,参数量为 1×1×3×4=12(3)1 \\times 1 \\times 3 \\times 4 = 12 \\tag{3}1×1×3×4=12(3) 提示 图片源自知乎","s":"(2)逐点卷积-Pointwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#2逐点卷积-pointwise-convolution","p":349},{"i":358,"t":"提示 正则化与权重衰退","s":"正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"","p":357},{"i":360,"t":"正则化(Regularization)是机器学习中用于控制模型过拟合的一种技术。在模型训练过程中,我们通常要最小化一个损失函数来得到最佳的模型参数。但是当模型过于复杂时,容易出现过拟合现象,即在训练数据上表现很好,但在测试数据上表现很差。这是因为模型过于依赖训练数据的噪声和细节,而忽略了真正的规律。 正则化通过在损失函数中增加一个惩罚项(Penalty)来对模型进行约束,防止其过分依赖训练数据。 常见的正则化方法包括L1正则化(硬性限制)、L2正则化(柔性限制)等。 L1正则化会使得一部分参数变为0,从而实现特征选择的效果;L2正则化则会使得模型参数尽量接近0,也就是使得模型更加平滑。在使用正则化时,需要调整正则化强度的超参数,以达到最优的泛化性能。","s":"一、什么是正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#一什么是正则化","p":357},{"i":362,"t":"min l(w,b) subject to ∥w∥12≤θ(1)min \\space l(w, b) \\space \\text{subject to} \\space \\Vert w \\Vert^2_1 \\leq \\theta \\tag{1}min l(w,b) subject to ∥w∥12​≤θ(1) L1正则化限制权重参数的L1范数小于某一特定的超参数 通常不限制偏移bbb 更小的超参数θ\\thetaθ意味着更强的正则项","s":"二、L1正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#二l1正则化","p":357},{"i":364,"t":"L2正则化是指在模型的损失函数中,加入对模型参数的L2范数进行惩罚的一种方法。公式如下所示: l(w,b)+λ2∥w∥12(2)l(w, b) + \\frac{\\lambda}{2} \\Vert w \\Vert^2_1 \\tag{2}l(w,b)+2λ​∥w∥12​(2) 其中,λ\\lambdaλ是一个正则化系数超参数 此时在更新梯度时,具有如下公式 ∂∂w(l(w,b)+λ2∥w∥12)=∂l(w,b)∂w+λw(3)\\frac{\\partial}{\\partial w} \\big(l(w, b) + \\frac{\\lambda}{2} \\Vert w \\Vert^2_1 \\big) = \\frac{\\partial l(w, b)}{\\partial w} + \\lambda w \\tag{3}∂w∂​(l(w,b)+2λ​∥w∥12​)=∂w∂l(w,b)​+λw(3) 
wt+1=(1−ηλ)wt+η∂l(wt,bt)∂wt(4)w_{t+1}=(1-\\eta \\lambda)w_t + \\eta \\frac{\\partial l(w_t, b_t)}{\\partial w_t} \\tag{4}wt+1​=(1−ηλ)wt​+η∂wt​∂l(wt​,bt​)​(4) 通常ηλ<1\\eta \\lambda < 1ηλ<1,因此又叫做权重衰退","s":"三、L2正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#三l2正则化与权重衰退","p":357},{"i":367,"t":"K-fold cross-validation is a technique used in machine learning to evaluate the performance of a model. The basic idea behind k-fold cross-validation is to split the dataset into kkk partitions, or folds, and then train and test the model kkk times, using a different fold for testing each time.","s":"What is k-fold cross-validation?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#what-is-k-fold-cross-validation","p":365},{"i":369,"t":"In each iteration of k-fold cross-validation, one of the kkk folds is used as the test set, while the remaining k−1k-1k−1 folds are used as the training set. This process is repeated kkk times, with each fold being used exactly once as the test set. The results from each iteration can then be averaged to produce a more accurate estimate of the model's performance.","s":"How does k-fold cross-validation work?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#how-does-k-fold-cross-validation-work","p":365},{"i":371,"t":"train set: to train the model and do parameter update validation set: to choose hyperparameter test set: the final test, only used once","s":"Summary","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#summary","p":365},{"i":374,"t":"AlexNet是指2012年由Alex Krizhevsky、Ilya Sutskever和Geoffrey Hinton提出的一种卷积神经网络模型,它主要应用于图像分类任务。在当时,AlexNet的表现远远超过了其他参赛的网络模型,并且在ImageNet比赛中获得了第一名。 标志着新的一轮神经网络热潮的开始","s":"背景","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#背景","p":372},{"i":376,"t":"ReLU激活函数 Dropout正则化、丢弃法 最大池化MaxPooling","s":"新的概念和技术","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#新的概念和技术","p":372},{"i":378,"t":"由于输入的图片更大,设置了更大的卷积核尺寸和步长 更大的池化窗口,使用最大池化 在卷积层中设置了更大的输出通道,提取更深层的特征、识别更多的模式 激活函数从Sigmoid改成了ReLU,减缓梯度消失 在卷积层和输出层之间仍使用两个全连接隐藏层,但在输出层之前增加了Dropout层做正则化 使用了数据增强data augmentation","s":"与LeNet比较","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#与lenet比较","p":372},{"i":381,"t":"LeNet是由Yann LeCun等人于1998年提出的卷积神经网络结构,该结构由卷积层、池化层和全连接层组成,可以高效地处理手写数字图像,并在MNIST数据集上取得了很好的性能。 LeNet-5的成功标志着卷积神经网络在计算机视觉领域中的崛起,并促进了深度学习的快速发展。","s":"背景","u":"/docs/Deep Learning/经典模型/LeNet","h":"#背景","p":379},{"i":383,"t":"import torch import numpy as np from torch import nn as nn from torch.nn import functional as F from d2l import torch as d2l from matplotlib import pyplot as plt import os os.environ['http_proxy'] = 'http://127.0.0.1:7890' os.environ['https_proxy'] = 'https://127.0.0.1:7890' class LeNetReshape(nn.Module): def __init__(self): super(LeNetReshape, self).__init__() def forward(self, x): return x.reshape(-1, 1, 28, 28) class LeNet5(nn.Module): def __init__(self): super(LeNet5, self).__init__() self.net = torch.nn.Sequential( LeNetReshape(), # 激活函数应为Sigmoid nn.Conv2d(1, 6, kernel_size=5, padding=2), nn.LeakyReLU(), nn.AvgPool2d(kernel_size=2, stride=2), nn.Conv2d(6, 16, kernel_size=5), nn.LeakyReLU(), nn.AvgPool2d(kernel_size=2, stride=2), nn.Flatten(), nn.Linear(16 * 5 * 5, 120), nn.LeakyReLU(), nn.Linear(120, 84), nn.Sigmoid(), nn.Linear(84, 10)) def forward(self, x): return self.net(x) def evaluate_accuracy_gpu(net, data_iter, device=None): if isinstance(net, torch.nn.Module): net.eval() if not device: device = next(iter(net.parameters())).device metric = d2l.Accumulator(2) for X, y in data_iter: if isinstance(X, list): X = [x.to(device) for x in X] else: 
X = X.to(device) y = y.to(device) metric.add(d2l.accuracy(net(X), y), y.numel()) # 此处accuracy是统计 return metric[0] / metric[1] def accuracy(y_hat, y): return torch.sum(y_hat.argmax(dim=1) == y) def train(net, train_iter, test_iter, num_epochs, lr, device): def init_weights(m): if type(m) == nn.Linear or type(m) == nn.Conv2d: nn.init.xavier_uniform_(m.weight) net.apply(init_weights) net.to(device) optimizer = torch.optim.SGD(net.parameters(), lr=lr) loss = torch.nn.CrossEntropyLoss() loss.to(device) animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], legend=['train loss', 'train acc', 'test acc']) timer, num_batches = d2l.Timer(), len(train_iter) metric = d2l.Accumulator(3) net.train() for epoch in range(num_epochs): for batch, (X, y) in enumerate(train_iter): timer.start() optimizer.zero_grad() X, y = X.to(device), y.to(device) y_hat = net(X) l = loss(y_hat, y) l.backward() optimizer.step() metric.add(l * X.shape[0], accuracy(y_hat, y), y.numel()) timer.stop() train_l = metric[0] / metric[2] train_acc = metric[1] / metric[2] if (batch + 1) % (num_batches // 5) == 0 or batch == num_batches - 1: animator.add(epoch + (batch + 1) / num_batches, (train_l, train_acc, None)) test_acc = evaluate_accuracy_gpu(net, test_iter) animator.add(epoch + 1, (None, None, test_acc)) print(f'loss {train_l:.3f}, train acc {train_acc:.3f}, test acc {test_acc:.3f}') print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on {str(device)}') plt.show() batch_size = 256 train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size) lr, num_epochs = 0.9, 10 lenet = LeNet5() train(lenet, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())","s":"代码实现","u":"/docs/Deep Learning/经典模型/LeNet","h":"#代码实现","p":379},{"i":385,"t":"在分类模型中,最后两个全连接层之间不要使用ReLU激活函数。因为ReLU的范围是[0, +∞),它会将所有负数都变成0。而最后一层全连接层输出了类别信息,倒数第二层的输出值包含着非常重要的类别信息,此时使用激活函数很可能会导致信息丢失。","s":"问题","u":"/docs/Deep Learning/经典模型/LeNet","h":"#问题","p":379},{"i":388,"t":"Logistic Regression直译为逻辑回归,是一种用来解决二分类问题的机器学习方法,用于估计某种事物的可能性。 逻辑回归经过sigmoid函数输出的结果可将其视为probability,而后根据设定的置信度阈值来判断该特征向量对应的标签是1还是0,用以解决二分类问题。","s":"一、什么是Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#一什么是logistic-regression","p":386},{"i":390,"t":"线性回归要求因变量是连续性数值变量,而逻辑回归要求因变量是离散的变量。 逻辑回归以线性回归为理论支持,通过Sigmoid函数引入了非线性因素。 线性回归常用MSE函数作为损失函数,而逻辑回归作为分类任务的解决方案通常搭配交叉熵损失函数进行训练。","s":"二、逻辑回归(Logistic Regression)和线性回归(Linear Regression)","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#二逻辑回归logistic-regression和线性回归linear-regression","p":386},{"i":392,"t":"从历史角度方面看,逻辑回归在诞生时使用MSE作为损失函数,其目标是让输出的概率更接近于1,与回归任务的目标相似。","s":"三、逻辑回归到底是回归任务(Regression)还是分类任务(Classification)?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#三逻辑回归到底是回归任务regression还是分类任务classification","p":386},{"i":394,"t":"逻辑回归以及其他分类任务在测试角度上的目标让提高分类准确率acc,但并不会将maximize accuracy作为数学上的训练方法,即在训练过程中不使用与acc有关的损失函数。 逻辑回归中的训练目标(评估函数)与预测目标(评估函数)并不相同,但方向一致。 acc=∑I(predi==yi)len(Y)(1)acc = \\frac{\\sum{I(pred_i==y_i)}}{len(Y)} \\tag{1}acc=len(Y)∑I(predi​==yi​)​(1) 如果在训练过程中以最大化acc为目标,当参数在训练过程中向标签方向更新使得逻辑回归输出的正确类的概率增大时,考虑以下两种情况: gradient = 0 if accuracy unchanged but weights changed: 由于阈值的存在,下一轮迭代输出的概率可能仍小于阈值,从而导致分类结果与上一次迭代相同,此时acc并无变化,出现梯度为0的情况。 gradient not continuous since the number of correct is not continunous: 当上一轮迭代的输出概率很接近阈值时,下一次迭代的概率提升了很少一点但是仍超过了阈值,且一个batch中有大量样本均存在这种情况,此时acc有显著提升而网络的权重的更新极小,此时,与acc有关的Loss函数对权重求导得到的梯度会出现梯度爆炸或者说不连续的情况。","s":"四、为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#四为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数","p":386},{"i":397,"t":"concat与stack函数 
stack函数对输入的两个张量在指定的维度进行堆叠,是创建了新的维度 concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的最外层对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"一、常用函数部分","u":"/docs/Deep Learning/基础知识/PyTroch基础","h":"#一常用函数部分","p":395},{"i":400,"t":"感知机是一种二元线性分类模型,旨在寻找一个超平面(在二维空间中即为一条直线),将不同类别的实例划分到不同的区域。感知机的训练过程包括迭代地对样本进行分类,并根据分类错误的情况调整超平面的参数,使得分类准确率逐步提高。感知机是基础的机器学习算法之一,其思想和方法对神经网络等更复杂的模型也具有启发意义。","s":"一、什么是感知机","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#一什么是感知机","p":398},{"i":402,"t":"输入向量:感知机的输入向量是一个n维向量x=(x1,x2,...,xn)x=(x_1,x_2,...,x_n)x=(x1​,x2​,...,xn​),表示一个样本的各个特征值。 权值向量:感知机的权值向量也是一个n维向量w=(w1,w2,...,wn)w=(w_1,w_2,...,w_n)w=(w1​,w2​,...,wn​),表示每个特征对应的权重。 偏置项:偏置项bbb是一个常数,可看作是模型的截距,用于调整阈值函数的位置。 内积运算:感知机将输入向量和权值向量进行内积运算,并加上偏置项,得到输入信号z=w∗x+bz=w*x+bz=w∗x+b。 阈值函数:将输入信号zzz带入阈值函数,如符号函数sign(z)sign(z)sign(z),即可得到分类结果。 损失函数:感知机使用误分类点到超平面的距离来作为损失函数,即 L(y,z)=max(0,−y∗z)(1)L(y,z)=max(0,-y*z) \\tag{1}L(y,z)=max(0,−y∗z)(1) 其中yyy是样本的真实标签,zzz是预测值。 参数更新:根据当前样本误分类情况来对权值向量www和偏置项bbb进行迭代更新。 收敛条件:当全部训练样本被正确分类或达到最大迭代次数时,感知机算法停止迭代。 感知机训练流程伪代码如下所示: initialize w = 0 and b = 0 repeat if yi * zi <= 0 then w = w + yi * xi and b = b + yi end if until all classified correctly","s":"二、详细原理","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#二详细原理","p":398},{"i":404,"t":"感知机是一个二分类模型,最早的AI模型之一 求解算法等价于使用批量大小为1的梯度下降 要求数据集线性可分,不能拟合XOR异或等非线性问题,导致第一次AI寒冬","s":"三、总结","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#三总结","p":398},{"i":409,"t":"CNN模型的输入向量的形状是固定的,其输出向量的形状也是固定的或可以根据不同的下游任务而唯一确定,即输入形状与下游任务共同确定了一个CNN模型的架构,具有较强的固定性。 信息 在视觉中,输入大多为数字图像,其形状可以大致分为由尺寸和通道数来决定。 从输入图像的尺寸看,当CNN中没有全连接层时,本质上可以接受任意尺寸的输入,但这是狭隘的。若考虑其下游任务以及输出,如FCN(Fully Convolution Network),FCN通过最后通过反卷积将tensor还原到原始图像尺寸,即在CNN中,输入与输出(下游任务的要求)都影响着CNN网络的结构。 从通道数看,CNN本质上可以接受任意通道数的图像输入,但是其模型效果将会受到极大的影响。以一个使用通道数为3的数据集进行训练的CNN模型,但在测试阶段分别使用通道数为 1 和 6 的数据进行推理的情形为例,进行分析: 通道数为1的测试集: 情况: 如果使用通道数为 1 的数据进行推理,即灰度图像,而模型在训练时是使用 RGB 数据集训练的,模型可能会受到一些影响。 解释: 模型可能在训练时学到了关于颜色的特定信息,而在测试时,如果输入是灰度图像,那些颜色信息将不可用。 建议: 在这种情况下,模型可能会失去对颜色信息的敏感性,可能需要进行进一步的调整或微调,以适应灰度图像的特性。 通道数为6的测试集: 情况: 如果使用通道数为 6 的数据进行推理,模型可能会面临额外的挑战,因为它在训练时只见过 3 个通道的数据。 解释: 模型在训练时学到的权重是基于 3 个通道的数据的,对于额外的通道,模型可能无法有效利用这些信息。 建议: 对于通道数不匹配的情况,可以考虑进行通道的适当组合或调整。这可能包括降低通道数(例如,只使用前 3 个通道),或者通过某种方式将 6 个通道映射到 3 个通道,例如通过某种特定的数据预处理。 
当模型的输入更复杂(sophisticated),是长度不定的向量序列(sequence)时,CNN不能很好地处理,且不能解决输出由输入和模型自行决定的下游任务,如生成类任务。","s":"输入与输出的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#输入与输出的局限性","p":406},{"i":411,"t":"在CNN中引入了局部连接和权值共享的归纳偏置: 局部连接:CNN使用卷积层通过滑动卷积核在输入上进行局部感受野的操作。每个神经元只与输入的一小部分区域相连,这意味着每个神经元只能接触到局部的上下文信息。这样的设计使得CNN更适用于处理图像等数据,其中局部结构通常很重要。 权值共享: CNN的参数共享使得模型能够学习到图像中的局部特征,这也是一种对于上下文的假设。相邻位置上的权重共享使得模型能够对局部结构进行建模,并且这种权重共享使得CNN具有更强的归纳偏置。 CNN的设计理念认为:在图像任务中,局部结构通常更为重要,局部连接和权值共享使得CNN更适用于图像处理等任务。 但也正是这种设计理念,使得CNN在面临长输入序列时不能很好地综合上下文信息、提取位置信息,因此Self-Attention应运而生,允许每个位置关注到序列中地所有其他位置。这种全局关联性质使得Transformer能够捕捉序列中的长距离依赖关系。","s":"关联上下文的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#关联上下文的局限性","p":406},{"i":413,"t":"提示 欢迎来到笔记本的深度学习部分","s":"Welcome","u":"/docs/Deep Learning/intro","h":"","p":412},{"i":415,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Deep Learning/intro","h":"#支持我","p":412},{"i":417,"t":"提示 对于TensorFlow框架,可以使用TensorBoard实现可视化。 对于PyTorch框架,可以使用Visdom或TensorBoardX实现可视化,本篇主要讲述Visdom。","s":"Visdom可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"","p":416},{"i":419,"t":"pip install visdom","s":"一、安装Visdom","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#一安装visdom","p":416},{"i":422,"t":"首先要通过终端启动Visdom,使用本机端口运行服务器。 以下二者均可。 visdom python -m visdom.server","s":"0. Visdom的启动","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#0-visdom的启动","p":416},{"i":424,"t":"from visdom import Visdom vis = Visdom() # 实例化 # 创建一条曲线,前两个参数分别为y轴数据、x轴数据,win参数是窗口的唯一标识,opt可选字典中可以给出窗口的title和legend vis.line([0.], [0.], win='win_id', opts=dict(title=\"win_title\")) # 在训练过程中的合适位置向初始化的曲线中喂数据 # viz.line([real_y_data], [global_step], win='win_id', update='append') # 查看训练loss vis.line([loss.item()], [epoch], win='win_id', update='append') # 对于非image数据,在传入visdom时仍需要先转化为numpy类型","s":"1. 单窗口单曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#1-单窗口单曲线的可视化","p":416},{"i":426,"t":"from visdom import Visdom vis = Visdom() vis.line([[0., 0.]], [0.], win='win_id', opts=dic(title=\"win_title\", legend=[\"curve_name_1\", \"curve_name_2\"])) # 在训练过程中的合适位置向初始化的曲线中喂数据 viz.line([[y1, y2]], [global_step], win='win_id', update='append')","s":"2. 
单窗口多曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#2-单窗口多曲线的可视化","p":416},{"i":428,"t":"通过编写脚本函数的方式,手动开启代理","s":"终端代理","u":"/docs/Linux/实用工具/终端代理","h":"","p":427},{"i":430,"t":"新建脚本文件terminal_proxy.sh # 开启代理 function proxy_on(){ export ALL_PROXY=socks5://127.0.0.1:7890 export http_proxy=http://127.0.0.1:7890 export https_proxy=https://127.0.0.1:7890 echo -e \"已开启代理\" } # 关闭代理 function proxy_off(){ unset ALL_PROXY unset http_proxy unset https_proxy echo -e \"已关闭代理\" }","s":"一、编写脚本","u":"/docs/Linux/实用工具/终端代理","h":"#一编写脚本","p":427},{"i":432,"t":"fish的配置文件:~/.config/fish/config.fish zsh的配置文件:~/.zshrc bash的配置文件:~/.bashrc 在配置文件末尾添加以下代码 source /path/terminal_proxy.sh","s":"二、关联终端配置文件","u":"/docs/Linux/实用工具/终端代理","h":"#二关联终端配置文件","p":427},{"i":434,"t":"在终端中输入以下命令即可开启代理 proxy_on 在终端中输入以下命令即可关闭代理 proxy_off","s":"三、使用","u":"/docs/Linux/实用工具/终端代理","h":"#三使用","p":427},{"i":437,"t":"dock显示的图标是全局图标,程序启动器的desktop文件位于/usr/share/applications中,全局主题中图标主题的程序logo位于~/.local/share/icons/Mkos-Big-Sur-Night/128x128/apps(deppending on specific situation)中。在logo文件夹中挑选想要的logo,在desktop中的icon位置修改即可 应用更新的时候会同时更新.desktop文件,因此在更换图标是最好直接更换在主题文件中替换icon,而不是更改desktop的icon路径 Finder小组件中application title文字不能垂直居中,可以更换为Window title插件","s":"一、latte-dock","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#一latte-dock","p":435},{"i":439,"t":"Finder栏中Plasmoids左半部分从左至右依次为: kpple menu application title/window titile(if the text of application title can't be centered vertically) global menu 右半部分从左至右依次为: resources monitor (fork) mcOS BS Inline Battery 网络 Control Center(replace the icon with search icon) Control Center(replace the icom with menu icon) Better Inline Clock 安装方法: plasmpkg2 -u xxx.plasmoid","s":"二、Kde Plasmoids","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#二kde-plasmoids","p":435},{"i":442,"t":"未知,可能是由Windows休眠模式导致","s":"一、发生原因","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#一发生原因","p":440},{"i":444,"t":"使用ntfsfix修复ntfs磁盘 安装ntfsfix yay -S ntfsfix 查看问题分区 df -h 修复 sudo ntfsfix /dev/your_partition 重启 reboot","s":"二、解决方案","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#二解决方案","p":440},{"i":446,"t":"提示 欢迎来到笔记本的杂记部分","s":"Welcome","u":"/docs/Others/intro","h":"","p":445},{"i":448,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Others/intro","h":"#支持我","p":445},{"i":450,"t":"提示 欢迎来到笔记本的Linux部分","s":"Welcome","u":"/docs/Linux/intro","h":"","p":449},{"i":452,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Linux/intro","h":"#支持我","p":449},{"i":454,"t":"告示栏的启用 在docusaurus.config.js的themeConfig中加入以下代码 announcementBar: { id: 'announcementBar-3', content: 'Welcome to my notebook!', isCloseable: false, }, 告示栏的背景个性化 在custom.css中加入以下代码 div[class^='announcementBar_'] { background: repeating-linear-gradient( -35deg, var(--ifm-color-primary-lighter), var(--ifm-color-primary-lighter) 20px, var(--ifm-color-primary-lightest) 10px, var(--ifm-color-primary-lightest) 40px ); font-weight: 700; }","s":"告示栏","u":"/docs/Others/博客搭建/告示栏","h":"","p":453},{"i":456,"t":"提示 保研面试中需要准备的问题,夏令营、预推免均可参考 参考视频:https://www.bilibili.com/video/BV1564y1e7b9/?spm_id_from=333.999.0.0&vd_source=24d8fcf68bc0e2b0003defe0995cf533","s":"要准备的问题","u":"/docs/Others/面试/要准备的问题","h":"","p":455},{"i":458,"t":"中文长、短自我介绍(1min、5min) 英文自我介绍(放在中文里的、全程英文的)","s":"一、自我介绍部分","u":"/docs/Others/面试/要准备的问题","h":"#一自我介绍部分","p":455},{"i":460,"t":"数据库bc范式和第三范式区别 特征值和特征向量的意义以及之间的关系 TCP/IP的工作过程描述 在局域网中TCP/IP协议栈是否冗余 列举各种排序算法以及复杂度 栈和队列的区别 如何用两个栈实现队列 使用双指针把负数移到正数前,要求控制到O(n)复杂度 动态规划 询问什么情况下要使用动态规划? 
TCP和UDP之间的区别 解释什么是中心极限定理 怎样快速找到数组中第k大的数?","s":"二、专业课面试题","u":"/docs/Others/面试/要准备的问题","h":"#二专业课面试题","p":455},{"i":462,"t":"最近阅读的论文 对于人脑和机器学习的思考","s":"三、自由面试题","u":"/docs/Others/面试/要准备的问题","h":"#三自由面试题","p":455}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/3",[0,0.427,1,3.185,2,2.227,3,2.227,4,2.227,5,1.83,6,4.108,7,2.227,8,2.35,9,2.227,10,3.322,11,2.35,12,2.227,13,2.35,14,2.703,15,1.413,16,2.958,17,3.985,18,3.065,19,2.958,20,3.185,21,3.322,22,4.119,23,2.488,24,2.227,25,2.227,26,2.35,27,2.227,28,2.35,29,2.124,30,2.35,31,3.482,32,3.482,33,3.482,34,2.35,35,2.35,36,2.124,37,2.227,38,2.35,39,2.227,40,2.124,41,2.35,42,2.35,43,2.35,44,2.35]],["t/5",[0,0.424,1,3.202,2,2.243,3,2.243,4,2.243,5,1.843,6,4.118,7,2.243,8,2.367,9,2.243,10,3.34,11,2.367,12,2.243,13,2.367,14,2.71,15,1.421,16,2.974,17,4,18,3.081,19,2.974,20,3.202,21,3.34,22,4.134,23,2.501,24,2.243,25,2.243,26,2.367,27,2.243,28,2.367,29,2.139,30,2.367,31,3.501,32,3.501,33,3.501,34,2.367,35,2.367,36,2.139,37,2.243,38,2.367,39,2.243,40,2.139,41,2.367,42,2.367,43,2.367,44,2.367]],["t/7",[0,0.43,40,3.769,45,4.17,46,4.81,47,4.17,48,4.444,49,4.81,50,4.444,51,5.944,52,4.81,53,4.81,54,3.247,55,4.444,56,4.444,57,4.444,58,4.81,59,3.951,60,4.81,61,3.769,62,4.81,63,4.81,64,4.444]],["t/9",[0,0.435,65,6.034,66,6.034,67,6.034,68,5.408,69,4.996,70,6.034,71,6.034,72,6.034,73,7.752,74,6.034,75,6.034,76,6.034,77,5.408]],["t/11",[0,0.44,14,1.118,15,1.275,78,1.597,79,1.355,80,1.499,81,1.597,82,2.712,83,4.376,84,1.597,85,3.528,86,1.597,87,2.825,88,1.597,89,1.597,90,1.597,91,1.597,92,1.597,93,3.259,94,1.355,95,1.597,96,2.712,97,1.597,98,1.597,99,1.597,100,3.651,101,1.597,102,1.597,103,1.597,104,1.597,105,1.355,106,1.597,107,1.597,108,3.142,109,1.597,110,2.712,111,1.597,112,1.597,113,1.597,114,1.597,115,1.597,116,1.597,117,1.597,118,2.545,119,1.597,120,2.712,121,1.597,122,2.712,123,1.597,124,1.597,125,1.597,126,1.597,127,2.205,128,1.597,129,1.597,130,3.534,131,3.065,132,1.42,133,2.712,134,1.42,135,1.597,136,3.794,137,2.712,138,2.712,139,1.597,140,1.597,141,1.597,142,1.597,143,1.597,144,1.597,145,1.597,146,1.597,147,1.597,148,1.597,149,1.597]],["t/13",[0,0.439,14,1.122,15,1.279,78,1.604,79,1.361,80,1.505,81,1.604,82,2.722,83,4.381,84,1.604,85,3.535,86,1.604,87,2.832,88,1.604,89,1.604,90,1.604,91,1.604,92,1.604,93,3.269,94,1.361,95,1.604,96,2.722,97,1.604,98,1.604,99,1.604,100,3.66,101,1.604,102,1.604,103,1.604,104,1.604,105,1.361,106,1.604,107,1.604,108,3.153,109,1.604,110,2.722,111,1.604,112,1.604,113,1.604,114,1.604,115,1.604,116,1.604,117,1.604,118,2.554,119,1.604,120,2.722,121,1.604,122,2.722,123,1.604,124,1.604,125,1.604,126,1.604,127,2.214,128,1.604,129,1.604,130,3.546,131,3.071,132,1.427,133,2.722,134,1.427,135,1.604,136,3.804,137,2.722,138,2.722,139,1.604,140,1.604,141,1.604,142,1.604,143,1.604,144,1.604,145,1.604,146,1.604,147,1.604,148,1.604,149,1.604]],["t/15",[0,0.454]],["t/17",[0,0.419]],["t/19",[0,0.427,1,3.185,2,2.227,3,2.227,4,2.227,5,1.83,6,4.108,7,2.227,8,2.35,9,2.227,10,3.322,11,2.35,12,2.227,13,2.35,14,2.703,15,1.413,16,2.958,17,3.985,18,3.065,19,2.958,20,3.185,21,3.322,22,4.119,23,2.488,24,2.227,25,2.227,26,2.35,27,2.227,28,2.35,29,2.124,30,2.35,31,3.482,32,3.482,33,3.482,34,2.35,35,2.35,36,2.124,37,2.227,38,2.35,39,2.227,40,2.124,41,2.35,42,2.35,43,2.35,44,2.35]],["t/21",[0,0.424,1,3.202,2,2.243,3,2.243,4,2.243,5,1.843,6,4.118,7,2.243,8,2.367,9,2.243,10,3.34,11,2.367,12,2.243,13,2.367,14,2.71,15,1.421,16,2.974,17,4,18,3.081,19,2.974,20,3.202,21,3.34,22,4.134,23,2.501,24,2.243,25,2.243,26,2.367,27,2.243,28,2.367,29,2.139,30,2.3
24,1777,4.224,1778,4.224,1779,4.224,1780,4.224,1781,4.224]],["t/346",[0,0.408,558,4.65,760,4.928,1478,4.18,1671,6.613,1672,5.335,1782,5.952,1783,5.952,1784,5.952,1785,7.684,1786,5.952,1787,5.335,1788,5.952,1789,5.952]],["t/348",[0,0.427,15,1.686,1790,5.644,1791,5.644,1792,5.644,1793,5.644,1794,5.644,1795,5.644,1796,5.644,1797,5.644,1798,5.644,1799,5.644,1800,5.644,1801,5.644,1802,5.644,1803,5.644,1804,5.644,1805,5.644,1806,5.644,1807,5.644]],["t/350",[0,0.308,136,5.619,541,3.759,690,4.765,1808,6.784,1809,6.784,1810,6.784,1811,6.784,1812,6.784,1813,6.784,1814,6.784]],["t/352",[0,0.375,83,4.243,93,4.328,541,4.974,1815,6.68,1816,6.68,1817,6.68,1818,6.68,1819,6.68]],["t/354",[0,0.313,83,4.325,541,5.043,1820,6.891,1821,6.891,1822,6.891,1823,6.891,1824,6.891]],["t/356",[0,0.399,15,1.908,83,3.279,93,4.138,540,5.725,541,5.115,690,4.486,736,5.289,1825,6.387,1826,6.387,1827,6.387]],["t/358",[0,0.419]],["t/360",[1828,7.747,1829,7.747,1830,7.747,1831,7.747]],["t/362",[85,3.014,170,6.57,558,3.553,940,5.263,1832,7.617,1833,7.617,1834,5.872,1835,5.263,1836,5.872,1837,5.263,1838,5.263,1839,5.872,1840,7.617,1841,5.872,1842,5.872,1843,5.872,1844,5.263]],["t/364",[0,0.411,85,3.281,558,5.13,1712,5.728,1713,5.728,1747,6.131,1835,4.061,1837,5.728,1838,5.728,1845,4.531,1846,4.531,1847,6.391,1848,4.531,1849,4.531,1850,4.531,1851,4.061,1852,4.531,1853,4.531,1854,4.531,1855,4.531,1856,4.531,1857,4.531,1858,4.531,1859,4.531,1860,4.531,1861,4.531]],["t/367",[59,5.077,259,3.38,387,4.843,541,3.821,766,5.077,909,4.532,1862,7.555,1863,5.71,1864,5.056,1865,5.056,1866,5.056,1867,5.056,1868,4.532,1869,6.181,1870,5.056,1871,5.056,1872,5.056,1873,5.056,1874,5.056,1875,6.181,1876,3.928,1877,5.056,1878,4.532]],["t/369",[59,3.443,240,4.192,259,3.691,276,4.192,327,3.872,387,3.285,421,4.192,541,2.591,689,5.411,691,4.88,766,4.811,1862,7.308,1863,3.872,1868,4.192,1875,5.857,1876,3.634,1878,6.751,1879,4.677,1880,4.677,1881,4.677,1882,3.872,1883,4.677,1884,4.192,1885,4.677,1886,4.677,1887,4.677,1888,4.677,1889,4.677,1890,4.677]],["t/371",[259,3.13,691,5.699,766,5.918,1787,5.725,1863,5.289,1869,5.725,1876,6.246,1884,5.725,1891,6.387,1892,6.387,1893,6.387,1894,6.387]],["t/374",[0,0.346,1895,7.612,1896,7.612,1897,7.612,1898,7.612]],["t/376",[29,5.54,1899,7.069,1900,7.887]],["t/378",[0,0.425,1047,6.09,1899,6.593,1901,7.355,1902,7.355]],["t/381",[136,5.216,1903,7.747,1904,7.747,1905,6.944]],["t/383",[0,0.422,1,0.864,6,2.974,14,0.438,15,1.445,16,0.802,17,3.177,18,0.831,19,0.802,20,2.095,23,1.207,54,3.589,61,1.611,64,1.9,68,3.394,100,0.831,105,0.901,118,0.997,136,2.095,191,2.789,208,0.945,480,2.043,512,1.062,766,1.689,1156,1.15,1535,5.031,1562,2.789,1655,2.056,1876,1.783,1905,1.15,1906,1.283,1907,2.294,1908,1.283,1909,1.283,1910,1.283,1911,1.283,1912,1.283,1913,1.283,1914,1.283,1915,1.283,1916,2.294,1917,1.283,1918,2.294,1919,2.294,1920,1.283,1921,2.294,1922,1.283,1923,1.283,1924,1.283,1925,1.283,1926,1.283,1927,1.283,1928,2.294,1929,1.283,1930,3.112,1931,2.294,1932,2.294,1933,1.283,1934,1.283,1935,1.283,1936,1.283,1937,1.283,1938,1.283,1939,1.283,1940,1.283,1941,1.283,1942,1.283,1943,2.294,1944,2.294,1945,1.283,1946,1.283,1947,1.283,1948,1.283,1949,3.112,1950,1.283,1951,2.294,1952,1.283,1953,1.283,1954,3.112,1955,2.294,1956,1.283,1957,2.294,1958,1.15,1959,2.294,1960,2.294,1961,2.294,1962,1.283,1963,1.283,1964,3.112,1965,3.787,1966,4.353,1967,1.283,1968,2.294,1969,1.283,1970,1.283,1971,1.283,1972,1.283,1973,1.283,1974,1.283,1975,1.283,1976,1.283,1977,1.283,1978,1.283,1979,1.283,1980,1.283,1981,1.283,1982,1.283,1983,3.394,1984,1.283,1985,3.787
,1986,1.283,1987,1.283,1988,1.283,1989,1.283,1990,1.15,1991,1.283,1992,1.283,1993,1.283,1994,1.283,1995,1.283,1996,1.283,1997,1.283,1998,1.283,1999,1.283,2000,1.283,2001,1.283,2002,1.283,2003,2.294,2004,2.294,2005,2.294,2006,2.294,2007,2.294,2008,1.283,2009,1.283,2010,1.283,2011,1.283,2012,1.283,2013,1.283,2014,1.283,2015,1.283,2016,1.283,2017,1.283,2018,1.283,2019,1.283,2020,1.283,2021,1.283,2022,1.283]],["t/385",[14,2.742,2023,8.032]],["t/388",[2,5.807,1682,7.069,2024,7.887]],["t/390",[0,0.358,1,5.311,2025,7.887]],["t/392",[2026,8.182]],["t/394",[0,0.384,14,2.004,69,6.307,1287,4.562,1958,5.263,1983,5.263,2027,5.872,2028,5.872,2029,5.872,2030,5.872,2031,5.872,2032,5.872,2033,5.872,2034,5.872,2035,5.872,2036,5.872,2037,5.872,2038,5.872,2039,5.872]],["t/397",[0,0.439,14,1.122,15,1.279,78,1.604,79,1.361,80,1.505,81,1.604,82,2.722,83,4.381,84,1.604,85,3.535,86,1.604,87,2.832,88,1.604,89,1.604,90,1.604,91,1.604,92,1.604,93,3.269,94,1.361,95,1.604,96,2.722,97,1.604,98,1.604,99,1.604,100,3.66,101,1.604,102,1.604,103,1.604,104,1.604,105,1.361,106,1.604,107,1.604,108,3.153,109,1.604,110,2.722,111,1.604,112,1.604,113,1.604,114,1.604,115,1.604,116,1.604,117,1.604,118,2.554,119,1.604,120,2.722,121,1.604,122,2.722,123,1.604,124,1.604,125,1.604,126,1.604,127,2.214,128,1.604,129,1.604,130,3.546,131,3.071,132,1.427,133,2.722,134,1.427,135,1.604,136,3.804,137,2.722,138,2.722,139,1.604,140,1.604,141,1.604,142,1.604,143,1.604,144,1.604,145,1.604,146,1.604,147,1.604,148,1.604,149,1.604]],["t/400",[0,0.372]],["t/402",[0,0.443,14,2.556,85,3.844,415,4.147,558,4.531,633,3.595,1844,4.147,1882,3.831,2040,4.627,2041,4.627,2042,4.627,2043,4.627,2044,4.627,2045,4.627,2046,4.627,2047,4.627,2048,4.627,2049,7.488,2050,4.627,2051,4.627,2052,4.627,2053,4.627,2054,4.627]],["t/404",[15,2.356,2055,7.887,2056,7.887]],["t/409",[0,0.466,15,2.237,83,4.166,100,5.259,2057,4.627,2058,4.627,2059,4.627,2060,4.627,2061,4.627,2062,4.627,2063,4.627]],["t/411",[0,0.334,2064,7.355,2065,7.355,2066,7.355,2067,7.355,2068,7.355,2069,7.355]],["t/413",[0,0.419]],["t/415",[453,5.747]],["t/417",[0,0.358,2070,7.887,2071,7.887]],["t/419",[2072,7.887,2073,7.887,2074,6.128]],["t/422",[0,0.34,132,5.508,280,5.813,2074,6.879,2075,7.481]],["t/424",[0,0.442,14,1.832,17,3.247,61,3.769,1990,4.81,2074,6.286,2076,4.81,2077,5.366,2078,5.366,2079,7.251,2080,5.366,2081,5.366,2082,4.81,2083,6.435,2084,5.366,2085,5.366]],["t/426",[0,0.389,14,2.647,17,3.651,2074,6.656,2076,5.408,2079,6.949,2082,5.408,2083,5.408,2086,6.034,2087,6.034,2088,6.034,2089,6.034,2090,6.034,2091,6.034]],["t/428",[0,0.372]],["t/430",[0,0.437,5,4.344,18,4.652,441,6.435,2092,5.366,2093,4.81,2094,8.09,2095,5.366,2096,5.366,2097,5.366,2098,4.81,2099,8.09,2100,5.366,2101,5.366,2102,5.366]],["t/432",[0,0.34,286,6.706,2103,7.481,2104,7.481,2105,7.481,2106,7.481]],["t/434",[0,0.41,2093,6.944,2098,6.944]],["t/437",[1851,6.276,2107,7.001,2108,7.001,2109,7.001,2110,7.001,2111,7.001,2112,7.001,2113,7.001,2114,7.001,2115,6.276]],["t/439",[0,0.344,36,3.32,517,3.673,876,4.237,2115,4.237,2116,4.727,2117,4.727,2118,7.575,2119,6.584,2120,4.727,2121,4.727,2122,4.727,2123,4.727,2124,4.727,2125,4.727,2126,4.727,2127,4.727,2128,4.727,2129,4.727,2130,4.727,2131,6.584,2132,4.727,2133,6.584,2134,6.584,2135,7.575,2136,4.727,2137,4.727,2138,4.727,2139,4.727,2140,4.727]],["t/442",[2141,8.182]],["t/444",[0,0.405,151,3.154,233,5.897,382,3.748,2142,6.579,2143,8.916,2144,6.579,2145,6.579,2146,6.579,2147,6.579]],["t/446",[0,0.419]],["t/448",[453,5.747]],["t/450",[0,0.365,266,7.199]],["t/452",[453,5.747]],["t/454",[0,0.403,27
,3.141,69,3.532,83,2.19,461,3.823,608,3.314,785,3.314,807,3.141,1287,3.314,1882,3.532,2148,4.266,2149,6.122,2150,4.266,2151,4.266,2152,4.266,2153,4.266,2154,4.266,2155,4.266,2156,7.825,2157,7.825,2158,7.825,2159,7.825,2160,6.122,2161,4.266,2162,6.122,2163,4.266,2164,4.266,2165,4.266,2166,4.266]],["t/456",[0,0.415,2167,7.887]],["t/458",[0,0.365,2168,8.032]],["t/460",[0,0.452,387,4.692,2169,6.68,2170,8.266,2171,6.68,2172,6.68]],["t/462",[0,0.419]]],"invertedIndex":[["",{"_index":0,"t":{"3":{"position":[[0,7],[18,1],[55,1],[68,1],[146,1],[330,1],[375,1],[460,1],[478,1],[482,1],[490,1],[596,1],[614,1],[618,1],[626,1],[734,1],[847,1]]},"5":{"position":[[10,1],[47,1],[60,1],[138,1],[322,1],[367,1],[452,1],[470,1],[474,1],[482,1],[588,1],[606,1],[610,1],[618,1],[726,1],[839,1]]},"7":{"position":[[19,4],[61,19],[113,1],[213,1],[297,5],[310,5],[328,5]]},"9":{"position":[[322,18],[341,23],[439,7],[447,11],[459,4],[464,19]]},"11":{"position":[[0,9],[97,1],[116,1],[135,1],[155,1],[174,1],[194,1],[223,1],[281,1],[307,1],[337,1],[402,1],[499,1],[532,1],[538,1],[565,1],[570,1],[603,1],[608,1],[620,3],[624,1],[639,1],[728,1],[776,1],[778,8],[789,1],[876,1],[907,2],[944,1],[1059,1],[1087,1],[1115,1],[1148,3],[1221,3],[1227,1],[1268,1],[1309,1],[1342,3],[1424,3]]},"13":{"position":[[87,1],[106,1],[125,1],[145,1],[164,1],[184,1],[213,1],[271,1],[297,1],[327,1],[392,1],[489,1],[522,1],[528,1],[555,1],[560,1],[593,1],[598,1],[610,3],[614,1],[629,1],[718,1],[766,1],[768,8],[779,1],[866,1],[897,2],[934,1],[1049,1],[1077,1],[1105,1],[1138,3],[1211,3],[1217,1],[1258,1],[1299,1],[1332,3],[1414,3]]},"15":{"position":[[0,2],[3,1],[5,9],[15,36],[52,28]]},"17":{"position":[[0,36],[37,28]]},"19":{"position":[[0,7],[18,1],[55,1],[68,1],[146,1],[330,1],[375,1],[460,1],[478,1],[482,1],[490,1],[596,1],[614,1],[618,1],[626,1],[734,1],[847,1]]},"21":{"position":[[10,1],[47,1],[60,1],[138,1],[322,1],[367,1],[452,1],[470,1],[474,1],[482,1],[588,1],[606,1],[610,1],[618,1],[726,1],[839,1]]},"23":{"position":[[19,4],[61,19],[113,1],[213,1],[297,5],[310,5],[328,5]]},"27":{"position":[[0,23],[24,25],[50,23],[74,6],[81,4],[86,6],[93,17]]},"29":{"position":[[0,20],[21,36],[58,13],[72,22]]},"31":{"position":[[0,14]]},"38":{"position":[[72,31],[277,5],[283,5]]},"40":{"position":[[0,57]]},"44":{"position":[[74,2],[233,3],[246,2],[368,11],[380,7],[401,5],[407,2],[410,28],[439,39]]},"48":{"position":[[33,44],[78,47]]},"51":{"position":[[0,2],[54,19],[74,13]]},"53":{"position":[[0,2]]},"56":{"position":[[0,2],[3,19]]},"60":{"position":[[0,10],[11,2],[14,31],[46,5],[52,2],[55,20],[76,7],[84,7],[92,15]]},"63":{"position":[[0,37],[38,2],[41,23],[65,30]]},"65":{"position":[[0,7],[8,20],[39,13]]},"68":{"position":[[0,15],[16,43],[60,15],[76,22]]},"70":{"position":[[12,9],[22,24],[90,2],[93,4],[107,1],[270,1],[444,1],[631,1]]},"73":{"position":[[0,61],[62,42],[105,56],[162,39],[202,32],[235,47]]},"75":{"position":[[0,40],[41,61],[135,2],[173,7],[181,4],[207,13]]},"77":{"position":[[0,42],[45,5],[51,32],[86,5],[92,20]]},"79":{"position":[[0,46],[49,5],[71,4],[78,5],[84,54]]},"81":{"position":[[0,27],[28,6],[35,17],[53,22]]},"85":{"position":[[28,14],[43,16],[60,7],[68,11],[80,8],[195,10]]},"89":{"position":[[19,9],[29,9],[39,7]]},"91":{"position":[[0,9],[10,20]]},"93":{"position":[[49,5],[105,17],[123,19],[143,19]]},"95":{"position":[[0,6],[167,16],[184,8],[258,58],[317,3],[354,5],[441,17],[459,2],[487,2],[514,2],[517,4],[538,2],[557,3],[578,5],[633,1],[702,2],[733,1],[799,1],[862,1],[929,1],[970,2],[982,1],[1060,2],[1097,3],[1143,5],[1198,1],[1234
,2],[1259,2],[1287,2],[1290,4],[1308,2],[1421,1],[1449,1],[1570,1],[1572,14],[1643,1],[1645,14],[1774,1],[1803,1],[1878,1],[1905,1],[1963,1],[2023,1],[2080,5],[2086,12],[2099,16],[2116,15]]},"103":{"position":[[56,10],[67,14],[82,15],[98,13]]},"110":{"position":[[123,1],[159,1],[184,1],[200,1],[220,2],[232,1],[256,9],[266,1],[282,1],[292,1],[315,1],[347,2],[374,1],[415,1],[417,8],[441,1],[472,1],[479,1],[493,1],[495,1],[497,1],[499,1],[501,1],[572,1],[630,4],[635,2],[638,1],[666,1],[736,1],[805,1],[863,1],[881,1],[883,1],[885,1],[936,1],[1002,2],[1335,1],[1371,1],[1396,1],[1412,1],[1432,2],[1444,1],[1468,9],[1478,1],[1494,1],[1504,1],[1527,1],[1559,2],[1586,1],[1656,1],[1687,1],[1694,1],[1708,1],[1710,1],[1712,1],[1714,1],[1716,1],[1718,2],[1721,1],[1750,1],[1803,1],[1830,2],[1871,1],[2145,2],[2254,1],[2312,2],[2340,2],[2352,1]]},"112":{"position":[[67,2],[225,15],[365,1],[393,1],[514,1],[516,14],[587,1],[589,14],[718,1],[747,1],[822,1],[849,1],[907,1],[967,1],[1024,13],[1053,36]]},"114":{"position":[[0,2],[3,14]]},"118":{"position":[[0,2]]},"120":{"position":[[0,2]]},"122":{"position":[[0,31]]},"124":{"position":[[39,1],[41,2],[44,1],[46,23],[70,2],[90,2],[93,1],[103,1],[108,2],[111,17],[143,1],[148,2],[151,2],[163,1],[179,1],[191,1],[193,2],[196,13],[215,1],[227,1],[232,1],[250,2],[276,2],[288,1],[290,2],[347,1],[357,1],[375,1],[382,1],[393,1],[400,1],[402,2],[405,9],[438,1],[440,2],[502,1],[514,1],[522,2],[525,26],[552,1],[561,2],[564,6],[571,1],[595,1],[597,2],[648,1],[667,2],[670,6],[677,1]]},"126":{"position":[[0,14],[15,2],[32,7],[125,1],[127,2],[130,1],[132,23],[156,2],[176,2],[179,1],[189,1],[194,2],[197,17],[229,1],[234,2],[237,2],[249,1],[265,1],[277,1],[279,2],[282,13],[301,1],[313,1],[318,1],[336,2],[362,2],[374,1],[376,2],[433,1],[443,1],[461,1],[468,1],[479,1],[486,1],[488,2],[491,9],[524,1],[526,2],[588,1],[600,1],[608,2],[611,26],[638,1],[647,2],[650,6],[657,1],[681,1],[683,2],[734,1],[753,2],[756,6],[763,1],[776,1],[799,2],[805,1],[832,1],[853,2],[861,1],[877,1],[898,1],[931,1],[953,1],[965,1],[984,1],[991,2],[999,1],[1006,1],[1021,1],[1040,2],[1061,1],[1068,2],[1076,1],[1078,1],[1085,2],[1094,1],[1106,1]]},"129":{"position":[[0,5],[6,2],[11,6],[22,2],[39,14],[54,7],[62,9],[74,5],[86,1],[91,1],[95,8],[104,7],[112,18],[131,41],[173,43],[217,45]]},"131":{"position":[[0,5],[6,4],[11,30],[42,6],[49,40],[107,4],[112,1],[114,1]]},"134":{"position":[[0,7],[8,74],[83,9],[93,90],[184,9],[260,10],[271,46],[318,5],[324,36],[361,16],[378,16],[395,8],[404,33],[467,1],[482,1],[519,3],[552,9],[564,12],[603,9],[615,11],[660,3],[666,10],[677,41],[736,2],[763,5]]},"140":{"position":[[16,2],[135,2],[138,15],[154,30],[185,6]]},"142":{"position":[[162,10],[173,6],[180,6],[187,12],[200,4]]},"144":{"position":[[0,31],[81,48]]},"149":{"position":[[0,2],[518,2],[598,20],[719,2],[722,14],[786,10]]},"151":{"position":[[11,7]]},"153":{"position":[[3,5],[62,24],[87,33],[154,2],[197,4],[202,28]]},"155":{"position":[[0,3],[82,3]]},"163":{"position":[[134,38]]},"167":{"position":[[146,6]]},"169":{"position":[[0,48],[105,15],[121,91]]},"174":{"position":[[3,7],[47,7]]},"176":{"position":[[3,7],[47,7]]},"179":{"position":[[0,51],[52,16],[128,12],[141,4],[146,10],[157,12],[170,9],[180,63],[244,2],[247,14],[262,13],[276,15],[360,20],[381,2],[384,72],[457,13],[471,43],[515,68],[584,87],[672,14],[687,9],[697,49],[747,12],[760,28],[789,14],[915,55]]},"181":{"position":[[0,7],[8,74],[83,9],[93,90],[184,9],[260,10],[271,46],[318,5],[324,36],[361,16],[378,16],[395,8],[404,33],[467,1],[482,1],[519,3],[552,9],[564,1
2],[603,9],[615,11],[660,3],[666,10],[677,41],[736,2],[763,5]]},"183":{"position":[[0,2],[3,14]]},"187":{"position":[[0,2],[3,5],[9,8],[18,12],[31,7],[39,4]]},"189":{"position":[[0,51],[52,16]]},"191":{"position":[[0,12],[13,4],[18,10],[29,12],[42,9],[52,63],[116,2],[119,14],[134,13],[148,15],[232,20],[253,2],[256,72],[329,13],[343,43],[387,68],[456,87],[544,14],[559,9],[569,49],[619,12],[632,28],[661,14],[787,55]]},"193":{"position":[[0,2]]},"195":{"position":[[57,1],[70,1],[80,2],[94,1],[102,1],[106,1],[113,1],[127,1],[132,1],[136,1],[140,1],[145,1],[149,1],[157,2],[164,1],[172,2],[178,1],[185,2],[190,2],[199,1],[201,1],[213,1]]},"197":{"position":[[0,20],[94,1],[99,1],[103,1],[107,1],[112,1],[116,1],[122,2],[125,5],[133,2],[140,1],[165,1],[181,1],[188,1],[205,1],[211,1],[217,1],[222,1],[230,1]]},"199":{"position":[[0,2],[3,12]]},"203":{"position":[[0,2],[3,93],[97,5],[263,5]]},"205":{"position":[[57,1],[131,1],[139,2],[144,2],[149,2],[155,1],[164,1],[179,2],[182,6],[198,1],[204,1],[206,2],[209,6],[216,1],[218,2],[221,5],[238,1],[245,1],[267,1],[279,2],[297,1],[352,1],[364,2],[369,2],[376,2],[382,1],[391,1],[404,1],[415,2],[428,1],[457,1],[459,1],[471,1]]},"207":{"position":[[0,47]]},"209":{"position":[[0,2]]},"211":{"position":[[57,1],[73,2],[88,1],[98,1],[132,1],[142,1],[158,1],[165,1],[182,1],[188,1],[194,1],[199,1],[213,1],[223,1],[228,2],[234,1],[241,1],[251,1],[269,1],[271,1],[328,2],[334,1],[341,2],[349,2],[368,1],[375,1],[382,2],[388,2],[397,1],[399,1]]},"213":{"position":[[19,19],[71,8],[83,1],[98,1]]},"217":{"position":[[5,1],[9,4],[23,7],[40,8],[54,1],[62,1],[66,6],[77,4],[87,1],[93,4],[103,1],[109,1],[113,4],[123,1],[129,1],[134,4],[144,1],[149,4],[159,1],[169,1],[173,7],[186,1],[196,1],[200,10],[216,1],[220,3],[229,1],[237,1],[241,3],[248,4],[258,1],[263,5],[273,6],[284,6]]},"220":{"position":[[0,3],[4,2],[7,3],[11,2],[14,3],[18,2],[30,4],[43,6],[58,6],[73,4],[87,4],[101,5],[114,4],[128,5],[143,4],[157,6],[173,6],[203,5],[218,8],[234,4]]},"222":{"position":[[21,3],[25,2],[28,3],[32,2],[35,3],[39,2],[52,4],[63,4],[75,2],[87,3],[97,1],[103,6],[115,2],[125,3],[133,2],[142,2],[165,4]]},"226":{"position":[[0,10],[39,1],[45,1],[75,1],[82,1],[88,1],[95,4],[194,11],[255,7],[344,7],[430,6],[527,1],[557,1],[600,1],[638,1],[675,1],[681,1],[691,1],[697,1],[723,1],[725,1],[768,1],[770,1]]},"229":{"position":[[18,1],[71,2]]},"231":{"position":[[0,10],[34,1],[36,1],[70,12],[118,1],[137,1],[160,1],[186,1],[237,1],[239,1],[301,2],[304,9],[314,8],[345,1],[398,15],[423,1],[457,2],[476,1],[511,2],[514,1],[561,1],[587,8],[610,8],[631,10],[667,1],[695,1],[716,1],[726,1],[750,1],[771,1],[773,8],[813,1],[815,3],[819,3],[823,3],[840,1],[842,1]]},"233":{"position":[[77,1]]},"236":{"position":[[7,1],[18,35],[61,1],[128,1]]},"238":{"position":[[0,6],[24,1],[39,19],[71,1],[86,1],[114,1]]},"240":{"position":[[23,1],[51,1],[58,1],[67,3],[71,1],[78,1],[94,1]]},"242":{"position":[[0,3]]},"244":{"position":[[17,4],[31,9],[41,9],[58,18],[205,1],[222,1],[224,3],[496,17],[525,17],[549,17],[579,17],[739,7],[773,39],[837,1]]},"246":{"position":[[13,30],[44,11],[56,2],[59,2],[121,22],[156,20],[183,15],[208,15],[230,10],[246,15],[344,15],[377,16],[403,15],[428,18],[460,22],[555,10],[566,2],[569,2],[577,17],[609,20],[643,23],[679,2],[684,9],[699,14],[721,8],[732,9],[751,34],[797,20],[820,4],[868,17],[905,20],[941,15],[964,18],[993,18],[1024,25],[1065,25],[1099,22],[1135,29],[1181,29],[1222,22],[1253,12],[1279,13],[1300,12],[1325,13],[1352,24],[1382,8],[1403,9],[1423,24],[1455,35],[1503,16],[1530,5],[1536,2],[1539,2],
[1600,10],[1631,11],[1664,11],[1693,16],[1710,5],[1716,2],[1719,2],[1737,11],[1770,10],[1793,28],[1835,4],[1858,9],[1873,2],[1888,4],[1893,8],[1902,2],[1905,2],[1922,19],[1954,20],[1987,26],[2026,23],[2050,5],[2056,2],[2059,2],[2071,17],[2103,4],[2114,2],[2132,9],[2159,9],[2194,10],[2215,9],[2225,4],[2230,2],[2233,2],[2244,13],[2277,18],[2306,11],[2327,12],[2350,9],[2370,11],[2382,8],[2391,2],[2394,2],[2417,19],[2461,10],[2476,13],[2502,13],[2520,13],[2546,13],[2572,16],[2609,16],[2643,23],[2684,23]]},"248":{"position":[[274,2],[323,2],[377,2],[451,2],[454,15],[480,2],[483,5],[498,2],[501,5],[522,2],[543,2],[546,4],[561,2],[564,14],[592,2],[595,6],[612,2],[615,15],[639,2],[642,21],[682,2],[778,2],[843,2],[933,2],[971,2],[1043,2],[1106,2],[1109,10],[1130,2],[1174,2],[1242,2],[1298,2],[1335,2],[1394,2],[1397,13],[1411,2],[1414,1],[1416,23],[1440,2],[1513,1],[1547,2],[1553,1],[1571,1],[1584,1],[1602,2],[1611,1],[1613,1],[1620,2],[1628,2],[1631,1],[1633,2],[1636,1],[1643,2],[1662,1]]},"250":{"position":[[219,2],[222,15],[246,2],[249,21],[287,2],[314,2],[339,2],[342,6],[363,2],[366,6],[382,2],[385,4],[400,2],[403,14],[430,2],[502,2],[565,2],[568,10],[589,2],[592,5],[607,2],[610,5],[626,2],[657,2],[703,2],[799,2],[864,2],[953,2],[987,2]]},"252":{"position":[[329,8],[338,3],[342,2],[354,8],[676,5],[682,4],[687,2],[701,4],[713,10],[724,3],[749,4],[763,4],[776,4],[789,6],[805,6],[822,4],[827,1],[829,5],[861,7],[885,11],[908,11],[928,9],[949,13],[973,17],[991,1],[1000,6],[1033,2],[1104,12],[1126,8],[1149,10],[1176,9]]},"254":{"position":[[149,2],[160,2]]},"256":{"position":[[336,2],[349,2],[359,1],[368,2],[519,2],[664,2],[667,4],[672,2],[681,1],[691,2],[783,1],[798,2],[892,2],[895,4],[900,2],[917,2],[965,2],[1001,2],[1004,4],[1009,2],[1022,2],[1025,6],[1043,2],[1046,6],[1064,2],[1067,6]]},"258":{"position":[[179,2],[182,2],[194,2],[197,2],[209,2],[212,4],[228,2],[253,2],[256,8]]},"260":{"position":[[184,2],[187,3],[200,2],[203,3],[218,2],[221,6],[238,2],[241,6],[259,2],[262,8],[281,2],[284,10]]},"262":{"position":[[124,2],[127,13],[151,2],[154,6],[171,2],[174,10],[195,2],[223,2],[226,25],[268,2],[271,21],[303,2],[306,8],[324,2],[327,16],[362,2],[365,8],[385,2],[388,8],[413,2],[416,23],[453,2],[456,15],[485,2],[488,15],[515,2],[518,19],[547,2],[550,18],[578,2],[581,8],[599,2],[602,8],[627,2],[630,13],[659,2],[662,16]]},"266":{"position":[[11,1],[20,5],[89,11],[108,24],[146,24],[171,2],[187,8],[211,1],[234,2],[240,2],[248,4],[255,1],[298,2],[305,7],[332,1],[336,5],[367,2],[374,6],[402,1],[406,5],[443,2],[446,1],[452,1],[456,5],[498,1],[503,3],[509,1],[515,4],[522,4],[544,5],[559,2],[565,7],[575,6],[592,2],[598,6],[607,7],[626,2],[632,3],[638,8],[657,2],[663,8],[680,2],[683,2],[689,2],[696,6],[716,2],[722,2],[729,9],[750,2],[753,1],[758,10],[783,2],[786,1],[791,2],[798,8],[820,2],[823,1],[828,10],[855,2],[858,1],[888,2],[891,1],[896,11],[921,2],[924,1],[929,2],[936,8],[960,2],[963,2],[980,2],[983,1],[988,12],[1015,1],[1020,2],[1026,2],[1029,1],[1034,7],[1045,1]]},"269":{"position":[[97,1],[103,1],[109,1],[115,1],[117,8],[148,1],[182,15],[207,1],[216,2],[235,1],[245,2],[248,1],[295,1],[321,8],[344,8],[365,10],[401,1],[429,1],[450,1],[452,8],[492,1],[514,1],[558,1],[600,1],[607,1],[689,1],[704,1],[706,8],[742,1],[840,1],[845,1],[854,1],[911,1],[918,1],[956,1],[966,1],[1018,2],[1024,1],[1065,1],[1067,1],[1069,1],[1083,1],[1085,8],[1121,1],[1188,2],[1197,1],[1199,2],[1208,2],[1211,1],[1213,2],[1288,1],[1293,1],[1302,1],[1325,1],[1344,1],[1351,1],[1389,1],[1399,1],[1451,2],[1457,1],[1525,2],[1534,1],[1536,2]
,[1543,2],[1546,1],[1548,2],[1557,1],[1572,1],[1574,1],[1588,1],[1598,1],[1632,1],[1647,2],[1655,1],[1657,2],[1664,2],[1667,1],[1669,2],[1686,1],[1688,1],[1697,2],[1718,1],[1720,1],[1740,1],[1946,1],[1963,2],[1966,14],[1991,1],[1998,1],[2021,2],[2030,2],[2033,1],[2035,2],[2043,2],[2057,2],[2060,14],[2257,1],[2280,1],[2287,1],[2313,2],[2325,2],[2328,1],[2330,2],[2338,2],[2360,2],[2377,1]]},"271":{"position":[[99,1],[105,1],[111,1],[117,1],[189,8],[220,1],[273,15],[298,1],[332,2],[351,1],[386,2],[389,1],[436,1],[462,8],[485,8],[506,10],[542,1],[570,1],[591,1],[601,1],[625,1],[646,1],[648,8],[688,1],[710,1],[754,1],[847,1],[854,1],[1038,1],[1053,1],[1055,1],[1137,2],[1175,1],[1205,1],[1338,1],[1340,1],[1419,1],[1501,1],[1649,1],[1751,1],[1899,1],[1901,1],[2126,1],[2128,1],[2194,1],[2278,1],[2337,1],[2374,1],[2416,1],[2469,1],[2502,1],[2514,1]]},"273":{"position":[[234,5],[259,5],[310,1],[329,1],[424,8],[448,4],[485,3],[580,4],[660,2],[693,10],[728,2],[764,1],[766,5],[790,1],[806,1],[865,1],[911,1],[955,1],[1004,1],[1025,2],[1031,1],[1043,1],[1045,1],[1058,1],[1092,1],[1094,11],[1106,11],[1118,5],[1124,7],[1132,7],[1267,6],[1283,1],[1289,6],[1347,1],[1352,6],[1410,1],[1415,7],[1495,6],[1531,1],[1553,1],[1565,1],[1577,1],[1603,1],[1635,2],[1659,1],[1771,1],[1773,7],[1788,1],[1832,1],[1834,8],[1851,1],[1856,1],[1863,1],[1865,6],[1893,1],[1898,1],[1900,1],[1913,1],[1915,2],[1988,3],[2004,1],[2070,2],[2100,7],[2151,6],[2184,4],[2204,10],[2262,1],[2328,7],[2368,1],[2370,5],[2427,1],[2451,1],[2477,5],[2494,8],[2503,1],[2510,1],[2563,1],[2621,9],[2631,1],[2633,1],[2635,2],[2638,8],[2647,2],[2650,11],[2663,15],[2702,1],[2715,1],[2795,2],[2868,1],[2897,1],[2926,1],[3030,4],[3038,1],[3109,1],[3132,1],[3160,1],[3198,1],[3200,1],[3212,1]]},"276":{"position":[[197,1],[218,2],[221,10],[278,2],[307,1],[309,2],[357,7],[400,2],[403,13],[456,2],[459,11],[523,2],[526,15],[584,2],[587,15],[614,1],[616,2],[619,10],[676,2],[705,1],[742,2],[745,14],[799,2],[802,11],[866,2],[869,15],[927,2],[930,15],[957,1],[959,2],[962,3],[988,1],[1063,1],[1065,2],[1068,4],[1090,1],[1151,1],[1255,1],[1333,3],[1337,2],[1340,9],[1350,1],[1352,2],[1355,4],[1378,1],[1465,1],[1467,2],[1470,4],[1493,1],[1580,1],[1582,2],[1585,4],[1609,1],[1698,1],[1711,1],[1743,1],[1795,2],[1798,4],[1803,2],[1847,2],[1850,4],[1897,2],[1900,4],[1939,2],[1942,9],[1952,2],[1989,2],[1992,9],[2011,2],[2014,4],[2019,2],[2032,2],[2035,4],[2040,2],[2053,2],[2067,2],[2070,4],[2075,2],[2088,2],[2101,2],[2104,4],[2109,2],[2122,1],[2134,1]]},"278":{"position":[[197,1],[264,1],[445,1],[493,1],[674,1],[698,1],[773,1],[792,1],[853,1],[957,1],[1035,3],[1039,1],[1059,1],[1146,1],[1166,1],[1253,1],[1274,1],[1363,1],[1376,1],[1408,1],[1460,2],[1580,2],[1626,2],[1639,2],[1653,2],[1666,1],[1678,1]]},"280":{"position":[[277,1],[340,2],[350,1],[537,1],[558,1],[570,2],[652,1],[665,1],[827,1]]},"283":{"position":[[0,24],[25,23],[49,5],[55,3]]},"285":{"position":[[0,26],[179,20],[495,17],[801,16]]},"287":{"position":[[0,31],[261,10],[341,1]]},"291":{"position":[[21,2],[58,2],[91,2],[125,2],[128,2],[142,2],[145,4],[160,2],[163,5],[184,2],[187,8],[210,2],[213,7],[221,2],[269,1],[279,2],[299,2]]},"293":{"position":[[19,1],[44,2],[47,7],[67,2],[70,7],[78,2]]},"295":{"position":[[9,1],[31,2],[57,2],[60,6],[78,2],[81,2],[86,2],[101,2],[119,2],[128,2],[131,21]]},"297":{"position":[[46,2],[49,6],[67,2],[70,6],[87,2],[90,6],[106,2],[109,6]]},"299":{"position":[[0,2],[3,6],[77,2],[80,10],[94,4],[102,4],[122,2],[125,5]]},"305":{"position":[[12,2],[15,7],[23,2],[60,2],[63,7],[84,5],[133,2],[17
1,2],[207,2],[214,6],[238,2],[271,2]]},"307":{"position":[[15,2],[62,1],[241,12]]},"309":{"position":[[28,2],[31,10],[42,2],[45,7],[53,2],[56,2],[59,2],[62,2],[65,2],[84,2],[104,2],[122,2],[139,2],[160,2],[180,2],[191,6]]},"312":{"position":[[0,88],[89,62]]},"314":{"position":[[0,115],[116,80],[197,64],[262,47],[310,50]]},"316":{"position":[[0,78],[79,65]]},"321":{"position":[[271,1],[494,3],[498,14],[513,32],[546,15]]},"325":{"position":[[24,1],[37,1],[116,1],[307,1],[352,1]]},"327":{"position":[[22,1],[40,1],[44,1],[52,1],[158,1],[176,1],[180,1],[188,1],[296,1]]},"330":{"position":[[37,19],[89,1],[189,1]]},"332":{"position":[[0,4],[317,2],[353,2],[356,2],[707,2],[719,2],[743,2],[777,2],[803,2],[817,2],[832,2],[880,2],[893,1],[948,2],[1001,1],[1033,1],[1064,1],[1121,4],[1176,2],[1228,2],[1236,1],[1253,2],[1266,2],[1269,2],[1345,2],[1442,1],[1454,1],[1496,1],[1518,1],[1541,1],[1588,1],[1638,1],[1685,1],[1710,1],[1744,1],[1776,1],[1821,1],[1871,1]]},"334":{"position":[[0,2],[33,74],[196,62],[393,4],[398,31]]},"336":{"position":[[662,4],[814,18],[833,2],[836,11],[848,5],[870,10],[881,5],[887,27],[915,5]]},"338":{"position":[[110,2],[126,57],[184,39]]},"340":{"position":[[46,18],[65,10],[99,5],[105,1],[107,5],[113,5],[174,2],[229,24],[362,7],[370,2],[403,1],[412,1],[414,26],[502,1],[508,2],[514,1],[663,5],[672,5],[896,1],[919,1],[966,1],[1016,1],[1127,1],[1160,1],[1206,1],[1356,2],[1398,2],[1601,2],[1647,2],[1715,2],[1778,1],[1784,2],[1822,1],[2029,2],[2075,2]]},"342":{"position":[[0,22],[23,12],[88,10],[236,1],[242,2],[260,1]]},"344":{"position":[[0,8],[9,1],[11,8],[20,1],[22,27],[113,1],[186,12],[348,1],[402,5],[414,3],[510,1],[512,32],[545,1],[547,10],[570,68],[639,72],[729,1],[740,41],[790,1],[798,2],[804,1],[806,1],[846,4],[851,11],[863,4],[868,34]]},"346":{"position":[[25,1],[27,1],[29,2],[230,5]]},"348":{"position":[[37,1],[64,1],[129,4],[182,3],[186,8],[510,63]]},"350":{"position":[[0,2]]},"352":{"position":[[106,12],[162,1]]},"354":{"position":[[95,1]]},"356":{"position":[[107,1],[133,2],[136,6]]},"358":{"position":[[0,2],[3,8]]},"364":{"position":[[72,1],[161,15],[253,1],[297,1],[335,1],[452,1],[564,1]]},"374":{"position":[[139,16]]},"378":{"position":[[0,24],[25,14],[40,32]]},"383":{"position":[[197,1],[249,1],[503,1],[542,1],[771,1],[775,1],[1022,1],[1067,1],[1137,1],[1173,1],[1190,1],[1252,1],[1285,1],[1358,2],[1459,2],[1483,2],[1580,1],[1628,1],[1683,1],[1803,1],[1841,1],[1991,1],[2026,1],[2037,1],[2097,1],[2163,1],[2175,1],[2197,1],[2209,1],[2231,1],[2236,1],[2251,2],[2257,2],[2271,2],[2310,1],[2319,1],[2324,1],[2376,1],[2435,1],[2564,1],[2577,1],[2650,1],[2678,1],[2735,1],[2751,1]]},"390":{"position":[[0,35]]},"394":{"position":[[89,38],[158,1],[306,1]]},"397":{"position":[[83,1],[102,1],[121,1],[141,1],[160,1],[180,1],[209,1],[267,1],[293,1],[323,1],[388,1],[485,1],[518,1],[524,1],[551,1],[556,1],[589,1],[594,1],[606,3],[610,1],[625,1],[714,1],[762,1],[764,8],[775,1],[862,1],[893,2],[930,1],[1041,1],[1069,1],[1097,1],[1130,3],[1203,3],[1209,1],[1250,1],[1291,1],[1324,3],[1406,3]]},"400":{"position":[[0,150]]},"402":{"position":[[335,30],[498,38],[537,15],[566,1],[576,1],[593,1],[598,2],[610,1],[614,1],[619,1],[630,1],[634,1]]},"409":{"position":[[85,2],[88,35],[364,1],[368,18],[398,3],[402,8],[413,24],[442,19],[462,3],[466,48],[515,3],[519,51],[582,3],[586,8],[597,31],[631,7],[639,3],[643,14],[660,31],[692,3],[696,47],[746,14],[763,6],[772,19]]},"411":{"position":[[136,5]]},"413":{"position":[[0,2],[3,14]]},"417":{"position":[[0,2]]},"422":{"position":[[29,7]]},"424":{"positio
n":[[30,1],[41,1],[43,3],[47,1],[181,1],[183,22],[206,1],[278,1],[353,1]]},"426":{"position":[[30,1],[152,1],[154,22]]},"428":{"position":[[0,18]]},"430":{"position":[[24,1],[26,4],[183,7],[191,1],[193,1],[195,4],[281,7],[289,1]]},"432":{"position":[[75,13]]},"434":{"position":[[0,16],[26,16]]},"439":{"position":[[148,12],[209,2],[330,5]]},"444":{"position":[[43,6],[56,2],[92,2]]},"446":{"position":[[0,2],[3,12]]},"450":{"position":[[0,2]]},"454":{"position":[[0,6],[65,1],[148,2],[151,9],[211,1],[412,2],[433,1]]},"456":{"position":[[0,2],[3,24]]},"458":{"position":[[21,20]]},"460":{"position":[[15,18],[67,13],[81,7],[89,10],[127,4],[132,15],[161,11]]},"462":{"position":[[0,7],[8,12]]}}}],["0",{"_index":14,"t":{"3":{"position":[[192,14],[476,1],[484,3],[499,3],[612,1],[620,3],[635,3]]},"5":{"position":[[184,14],[468,1],[476,3],[491,3],[604,1],[612,3],[627,3]]},"11":{"position":[[972,2],[996,3]]},"13":{"position":[[962,2],[986,3]]},"19":{"position":[[192,14],[476,1],[484,3],[499,3],[612,1],[620,3],[635,3]]},"21":{"position":[[184,14],[468,1],[476,3],[491,3],[604,1],[612,3],[627,3]]},"38":{"position":[[0,17]]},"95":{"position":[[2156,5]]},"108":{"position":[[50,5]]},"124":{"position":[[105,2],[145,2],[165,2],[244,5],[359,5],[443,33],[600,25]]},"126":{"position":[[191,2],[231,2],[251,2],[330,5],[445,5],[529,33],[686,25],[856,4],[893,4],[994,4],[1103,2]]},"195":{"position":[[115,2],[129,2],[210,2]]},"197":{"position":[[96,2],[167,2],[183,2]]},"203":{"position":[[160,24],[185,3],[256,2],[259,3]]},"205":{"position":[[86,3],[142,1],[152,2],[367,1],[379,2],[468,2]]},"207":{"position":[[48,22]]},"211":{"position":[[90,2],[100,2],[144,2],[160,2],[208,4],[231,2],[365,2]]},"240":{"position":[[53,2]]},"248":{"position":[[1605,2],[1659,2]]},"266":{"position":[[253,1],[273,1],[839,1]]},"269":{"position":[[842,2],[913,2],[1021,2],[1290,2],[1346,2],[1454,2],[1993,2],[2282,2],[2374,2]]},"271":{"position":[[2511,2]]},"273":{"position":[[92,1],[121,1],[792,2],[808,2],[3089,3],[3209,2]]},"276":{"position":[[382,2],[724,2],[1007,2],[2131,2]]},"278":{"position":[[283,2],[512,2],[717,2],[1675,2]]},"280":{"position":[[824,2]]},"309":{"position":[[114,7],[172,7]]},"325":{"position":[[169,14]]},"327":{"position":[[38,1],[46,3],[61,3],[174,1],[182,3],[197,3]]},"340":{"position":[[405,4],[669,2]]},"344":{"position":[[350,5]]},"383":{"position":[[2260,1]]},"385":{"position":[[44,74]]},"394":{"position":[[308,1]]},"397":{"position":[[958,2],[982,3]]},"402":{"position":[[568,1],[578,1],[601,1]]},"424":{"position":[[131,5]]},"426":{"position":[[56,5],[62,5]]}}}],["0.0007",{"_index":1797,"t":{"348":{"position":[[278,6]]}}}],["0.1",{"_index":122,"t":{"11":{"position":[[840,5],[864,4]]},"13":{"position":[[830,5],[854,4]]},"397":{"position":[[826,5],[850,4]]}}}],["0.2",{"_index":125,"t":{"11":{"position":[[869,6]]},"13":{"position":[[859,6]]},"397":{"position":[[855,6]]}}}],["0.25",{"_index":776,"t":{"231":{"position":[[188,5]]}}}],["0.3",{"_index":119,"t":{"11":{"position":[[816,5]]},"13":{"position":[[806,5]]},"397":{"position":[[802,5]]}}}],["0.553y^​=0.4561x1​−0.0007x2​+0.3251x3​+0.0009x4​+0.0001x5​−0.9142x6​−0.553",{"_index":1804,"t":{"348":{"position":[[343,74]]}}}],["0.7",{"_index":123,"t":{"11":{"position":[[846,4]]},"13":{"position":[[836,4]]},"397":{"position":[[832,4]]}}}],["0.8",{"_index":120,"t":{"11":{"position":[[828,4],[851,5]]},"13":{"position":[[818,4],[841,5]]},"397":{"position":[[814,4],[837,5]]}}}],["0.88",{"_index":124,"t":{"11":{"position":[[857,6]]},"13":{"position":[[847,6]]},"397":{"position":[[843,6]]
}}}],["0.9",{"_index":118,"t":{"11":{"position":[[811,4],[822,5]]},"13":{"position":[[801,4],[812,5]]},"383":{"position":[[2737,4]]},"397":{"position":[[797,4],[808,5]]}}}],["0.9142",{"_index":1802,"t":{"348":{"position":[[330,6]]}}}],["0.99",{"_index":121,"t":{"11":{"position":[[833,6]]},"13":{"position":[[823,6]]},"397":{"position":[[819,6]]}}}],["01",{"_index":1139,"t":{"266":{"position":[[500,2]]}}}],["012",{"_index":1132,"t":{"266":{"position":[[370,3]]}}}],["0x3f3f3f3f",{"_index":738,"t":{"226":{"position":[[489,10]]},"280":{"position":[[131,10]]}}}],["0xf",{"_index":1128,"t":{"266":{"position":[[301,3]]}}}],["0xff;//d",{"_index":362,"t":{"110":{"position":[[294,8],[1506,8]]}}}],["0将通过从w",{"_index":1791,"t":{"348":{"position":[[25,11]]}}}],["0是手动调节的。我们把这个损失函数叫做l1。请注意,除了w",{"_index":1699,"t":{"340":{"position":[[373,29]]}}}],["0,计算w",{"_index":1770,"t":{"344":{"position":[[63,10]]}}}],["1",{"_index":15,"t":{"3":{"position":[[207,13],[624,1]]},"5":{"position":[[199,13],[616,1]]},"11":{"position":[[975,3],[1261,4],[1400,4]]},"13":{"position":[[965,3],[1251,4],[1390,4]]},"19":{"position":[[207,13],[624,1]]},"21":{"position":[[199,13],[616,1]]},"38":{"position":[[18,12]]},"44":{"position":[[77,57]]},"126":{"position":[[888,2]]},"129":{"position":[[20,1],[25,13],[93,1],[263,37]]},"142":{"position":[[134,16]]},"149":{"position":[[34,7]]},"151":{"position":[[0,2]]},"153":{"position":[[0,2]]},"157":{"position":[[70,2],[190,2]]},"159":{"position":[[84,2]]},"161":{"position":[[91,2]]},"167":{"position":[[100,2]]},"174":{"position":[[0,2]]},"176":{"position":[[0,2]]},"197":{"position":[[63,21]]},"205":{"position":[[200,2],[240,2],[293,3]]},"209":{"position":[[63,1]]},"211":{"position":[[243,2],[331,2],[386,1]]},"226":{"position":[[742,3],[772,2],[775,2]]},"244":{"position":[[198,4]]},"248":{"position":[[1597,2]]},"266":{"position":[[264,8],[342,1],[573,1],[605,1],[636,1],[739,1],[769,1],[807,1]]},"269":{"position":[[213,2],[1315,3],[1611,3],[1630,1],[1774,3],[2109,3]]},"271":{"position":[[1645,3],[1747,3]]},"273":{"position":[[76,1],[105,1],[143,1],[1055,2],[2864,3]]},"276":{"position":[[862,3],[923,3],[1935,3],[1985,3]]},"278":{"position":[[617,3],[659,3],[1576,3],[1613,3]]},"280":{"position":[[370,2],[568,1],[797,3]]},"299":{"position":[[91,2]]},"309":{"position":[[76,7],[93,10]]},"325":{"position":[[132,2],[184,13]]},"327":{"position":[[186,1]]},"332":{"position":[[1564,3],[1628,3],[1799,3]]},"336":{"position":[[38,1]]},"340":{"position":[[500,1],[512,1],[942,3],[1183,3]]},"348":{"position":[[98,30]]},"356":{"position":[[87,1]]},"383":{"position":[[404,2],[407,2],[2233,2],[2288,2],[2321,2],[2437,2]]},"397":{"position":[[961,3],[1243,4],[1382,4]]},"404":{"position":[[22,20]]},"409":{"position":[[362,1],[387,10],[411,1]]}}}],["1)&&(countout(1)==n",{"_index":1235,"t":{"271":{"position":[[1625,19]]}}}],["1))7label",{"_index":1722,"t":{"340":{"position":[[1006,9]]}}}],["1),indegree(0),outdegree(0",{"_index":786,"t":{"231":{"position":[[429,27]]},"271":{"position":[[304,27]]}}}],["1)[1,−1)的索引区间中的元素值都会加1,而对于某次刷漆终点e的下一个索引为e+1的元素值由于−1",{"_index":637,"t":{"207":{"position":[[193,51]]}}}],["1+max(getheight(t[rt].l),getheight(t[rt].r",{"_index":1424,"t":{"276":{"position":[[1017,45]]},"278":{"position":[[727,45]]}}}],["1,0",{"_index":754,"t":{"226":{"position":[[778,4]]}}}],["1,0,1,0,0,1,0",{"_index":752,"t":{"226":{"position":[[727,14]]}}}],["1,0,1,1",{"_index":756,"t":{"226":{"position":[[790,8]]}}}],["1,0,n",{"_index":1453,"t":{"276":{"position":[[1929,5],[1979,5]]},"278":{"position":[[1570,5],[1607,5]
]},"280":{"position":[[791,5]]}}}],["1,1,0",{"_index":755,"t":{"226":{"position":[[783,6]]}}}],["1,1,0,1,1",{"_index":757,"t":{"226":{"position":[[799,11]]}}}],["1,l2+1,l2+p2",{"_index":1481,"t":{"280":{"position":[[475,14]]}}}],["1,lb+1,lb+p2",{"_index":1415,"t":{"276":{"position":[[508,14]]},"278":{"position":[[377,14]]}}}],["1,lb,lb+p2",{"_index":1420,"t":{"276":{"position":[[851,10]]},"278":{"position":[[606,10]]}}}],["1.0",{"_index":733,"t":{"226":{"position":[[454,4]]}}}],["1.51",{"_index":395,"t":{"110":{"position":[[876,4]]}}}],["1.vector",{"_index":834,"t":{"244":{"position":[[22,8]]}}}],["10",{"_index":23,"t":{"3":{"position":[[358,3],[762,3]]},"5":{"position":[[350,3],[754,3]]},"19":{"position":[[358,3],[762,3]]},"21":{"position":[[350,3],[754,3]]},"124":{"position":[[229,2]]},"126":{"position":[[315,2]]},"195":{"position":[[142,2],[151,3],[160,3]]},"197":{"position":[[109,2],[118,3],[136,3],[219,2]]},"211":{"position":[[196,2]]},"226":{"position":[[683,3]]},"231":{"position":[[121,3],[287,3]]},"236":{"position":[[144,33]]},"325":{"position":[[335,3]]},"327":{"position":[[324,3]]},"383":{"position":[[850,4],[2742,2]]}}}],["10,3.141590,\"method",{"_index":767,"t":{"231":{"position":[[38,19]]}}}],["10.multimap",{"_index":849,"t":{"244":{"position":[[567,11]]}}}],["100",{"_index":22,"t":{"3":{"position":[[348,4],[353,4],[752,4],[757,4]]},"5":{"position":[[340,4],[345,4],[744,4],[749,4]]},"19":{"position":[[348,4],[353,4],[752,4],[757,4]]},"21":{"position":[[340,4],[345,4],[744,4],[749,4]]},"95":{"position":[[687,4]]},"236":{"position":[[77,42]]},"273":{"position":[[281,3]]},"325":{"position":[[325,4],[330,4]]},"327":{"position":[[314,4],[319,4]]}}}],["1000",{"_index":616,"t":{"195":{"position":[[72,5]]}}}],["1000000007",{"_index":1463,"t":{"280":{"position":[[154,10]]}}}],["1010",{"_index":1133,"t":{"266":{"position":[[390,7],[412,4],[462,4]]}}}],["106",{"_index":270,"t":{"95":{"position":[[957,4]]}}}],["108",{"_index":1818,"t":{"352":{"position":[[164,3]]}}}],["10px",{"_index":2163,"t":{"454":{"position":[[367,5]]}}}],["10的vector,初始化为3",{"_index":1524,"t":{"291":{"position":[[24,23]]}}}],["11",{"_index":1362,"t":{"273":{"position":[[2726,2]]}}}],["11.hash_set",{"_index":850,"t":{"244":{"position":[[597,11]]}}}],["1111",{"_index":1129,"t":{"266":{"position":[[321,6]]}}}],["11×1",{"_index":542,"t":{"144":{"position":[[150,18]]}}}],["11×1卷积核,每个filter对上一步的featur",{"_index":1825,"t":{"356":{"position":[[14,28]]}}}],["12",{"_index":736,"t":{"226":{"position":[[474,2]]},"266":{"position":[[381,8]]},"356":{"position":[[109,2]]}}}],["12.hash_multiset",{"_index":852,"t":{"244":{"position":[[629,16]]}}}],["120",{"_index":1937,"t":{"383":{"position":[[780,5]]}}}],["120,210都是30的倍数,由于要找最大的,所以答案是210",{"_index":645,"t":{"209":{"position":[[107,33]]}}}],["13.hash_map",{"_index":853,"t":{"244":{"position":[[666,11]]}}}],["1313×13个grid",{"_index":576,"t":{"151":{"position":[[75,12]]}}}],["14.hash_multimap",{"_index":854,"t":{"244":{"position":[[698,16]]}}}],["148",{"_index":287,"t":{"95":{"position":[[1237,8]]}}}],["149",{"_index":243,"t":{"95":{"position":[[462,8]]}}}],["16",{"_index":1934,"t":{"383":{"position":[[669,3]]}}}],["18446744073709551615ull",{"_index":745,"t":{"226":{"position":[[602,24]]}}}],["1?'\\n",{"_index":1435,"t":{"276":{"position":[[1324,8]]},"278":{"position":[[1026,8]]}}}],["1\\eta",{"_index":1860,"t":{"364":{"position":[[545,10]]}}}],["1\\eta=1η=1",{"_index":1755,"t":{"342":{"position":[[36,12]]}}}],["1][b,c,1,1]的tensor",{"_index":587,"t":{"157":{"position":[[73,43]]}}}],["1][
b,c,1,1]的tensor,再送入共享的多层感知机网络进行降维再升维,最后将二者相加再经过sigmoid",{"_index":591,"t":{"159":{"position":[[87,72]]}}}],["1][b,c,1,1]的tensor,该tensor",{"_index":605,"t":{"167":{"position":[[103,37]]}}}],["1],即reduce了dim=1",{"_index":116,"t":{"11":{"position":[[756,19]]},"13":{"position":[[746,19]]},"397":{"position":[[742,19]]}}}],["1_44=fd.img",{"_index":310,"t":{"95":{"position":[[1685,12]]},"112":{"position":[[629,12]]}}}],["1_44=fd_aug.img",{"_index":312,"t":{"95":{"position":[[1724,16]]},"112":{"position":[[668,16]]}}}],["1e",{"_index":735,"t":{"226":{"position":[[471,2]]}}}],["1e5",{"_index":748,"t":{"226":{"position":[[677,3]]}}}],["1e9",{"_index":750,"t":{"226":{"position":[[693,3]]}}}],["1import",{"_index":1717,"t":{"340":{"position":[[849,7]]}}}],["1k−1",{"_index":1881,"t":{"369":{"position":[[117,4]]}}}],["1min、5min",{"_index":2168,"t":{"458":{"position":[[0,20]]}}}],["1}a−1",{"_index":611,"t":{"179":{"position":[[122,5]]},"189":{"position":[[122,5]]}}}],["1}{n",{"_index":341,"t":{"101":{"position":[[300,5]]}}}],["1×11",{"_index":540,"t":{"144":{"position":[[130,12]]},"356":{"position":[[0,6]]}}}],["1×1×3×4=12(3)1",{"_index":1826,"t":{"356":{"position":[[65,14]]}}}],["1ηλ<1",{"_index":1861,"t":{"364":{"position":[[566,15]]}}}],["1−1。这样在所有输入结束后的计算前缀和阶段,在每一个值为[1,−1)[1",{"_index":636,"t":{"207":{"position":[[153,38]]}}}],["1−1加上之前元素所累积的1",{"_index":639,"t":{"207":{"position":[[261,42]]}}}],["1−1而抵消影响(自身值为−1",{"_index":638,"t":{"207":{"position":[[245,15]]}}}],["1−σ)(2",{"_index":1589,"t":{"325":{"position":[[160,8]]}}}],["1−σ)(2)\\frac{{\\rm",{"_index":1587,"t":{"325":{"position":[[77,18]]}}}],["1个filter,其中包含3个kernel。每个kernel分别对输入图像的3",{"_index":1820,"t":{"354":{"position":[[0,55]]}}}],["1,即n0",{"_index":497,"t":{"129":{"position":[[80,5]]}}}],["2",{"_index":131,"t":{"11":{"position":[[990,2],[993,2],[1078,2],[1247,2],[1258,2],[1358,2],[1397,2]]},"13":{"position":[[980,2],[983,2],[1068,2],[1237,2],[1248,2],[1348,2],[1387,2]]},"126":{"position":[[900,2],[945,3]]},"129":{"position":[[72,1]]},"140":{"position":[[5,1]]},"149":{"position":[[783,2]]},"151":{"position":[[8,2]]},"153":{"position":[[194,2]]},"174":{"position":[[44,2]]},"176":{"position":[[44,2]]},"244":{"position":[[207,4]]},"269":{"position":[[1808,3],[2199,3]]},"273":{"position":[[163,1]]},"285":{"position":[[474,20]]},"299":{"position":[[99,2]]},"305":{"position":[[188,18]]},"332":{"position":[[1657,3],[1840,3]]},"336":{"position":[[199,1]]},"342":{"position":[[365,1]]},"397":{"position":[[976,2],[979,2],[1060,2],[1229,2],[1240,2],[1340,2],[1379,2]]}}}],["2)中,当i,ji,ji,j",{"_index":1498,"t":{"285":{"position":[[426,47]]}}}],["2.95.2",{"_index":252,"t":{"95":{"position":[[658,6]]}}}],["2.list",{"_index":835,"t":{"244":{"position":[[51,6]]}}}],["20",{"_index":1281,"t":{"273":{"position":[[354,2]]}}}],["20,\"abc",{"_index":1530,"t":{"293":{"position":[[21,11]]}}}],["200",{"_index":316,"t":{"95":{"position":[[1799,3]]},"112":{"position":[[743,3]]}}}],["2001,2003,2004",{"_index":380,"t":{"110":{"position":[[682,14]]}}}],["2003",{"_index":385,"t":{"110":{"position":[[752,5]]}}}],["2004",{"_index":390,"t":{"110":{"position":[[821,5]]}}}],["201",{"_index":643,"t":{"209":{"position":[[65,14],[141,8]]}}}],["201,210,012,021,102,120",{"_index":644,"t":{"209":{"position":[[80,26]]}}}],["201,让数字随意组合,是否能组合出30的倍数,如果能够组合成30",{"_index":642,"t":{"209":{"position":[[3,59]]}}}],["20px",{"_index":2161,"t":{"454":{"position":[[327,5]]}}}],["210",{"_index":646,"t":{"209":{"position":[[150,8]]}}}],["2147483647",{"_index":741,"t":{"226":{"position":[[529,11]]}}}
],["235f2db4c261",{"_index":1673,"t":{"334":{"position":[[183,12]]}}}],["256",{"_index":2018,"t":{"383":{"position":[[2652,3]]}}}],["27",{"_index":1822,"t":{"354":{"position":[[97,2]]}}}],["28",{"_index":1921,"t":{"383":{"position":[[410,3],[414,3]]}}}],["2])15cross_loss(predict",{"_index":1729,"t":{"340":{"position":[[1225,24]]}}}],["2])8nllloss(predict",{"_index":1723,"t":{"340":{"position":[[1035,20]]}}}],["2λw",{"_index":1783,"t":{"346":{"position":[[32,17]]}}}],["2型文法(上下文无关语法,cfg",{"_index":185,"t":{"48":{"position":[[0,32]]}}}],["2型文法,又称上下文无关文法(context",{"_index":165,"t":{"38":{"position":[[31,22]]}}}],["2,抹除所有置信度更小的其iou超过阈值的bbox",{"_index":570,"t":{"149":{"position":[[679,39]]}}}],["3",{"_index":83,"t":{"11":{"position":[[132,2],[171,2],[301,3],[519,2],[522,2],[525,4],[534,3],[567,2],[605,2],[610,3],[614,2],[617,2],[882,2],[1081,3],[1250,3],[1254,3],[1361,4]]},"13":{"position":[[122,2],[161,2],[291,3],[509,2],[512,2],[515,4],[524,3],[557,2],[595,2],[600,3],[604,2],[607,2],[872,2],[1071,3],[1240,3],[1244,3],[1351,4]]},"151":{"position":[[19,2]]},"153":{"position":[[231,2]]},"238":{"position":[[26,2]]},"244":{"position":[[214,5]]},"269":{"position":[[1791,3],[2217,3],[2235,3]]},"307":{"position":[[64,2]]},"332":{"position":[[1561,2],[1568,3],[1796,2],[1803,3]]},"340":{"position":[[939,2],[946,3],[1180,2],[1187,3]]},"352":{"position":[[142,1],[151,1]]},"354":{"position":[[77,1],[93,1]]},"356":{"position":[[96,1]]},"397":{"position":[[118,2],[157,2],[287,3],[505,2],[508,2],[511,4],[520,3],[553,2],[591,2],[596,3],[600,2],[603,2],[868,2],[1063,3],[1232,3],[1236,3],[1343,4]]},"409":{"position":[[629,1],[658,1],[744,1],[770,1]]},"454":{"position":[[88,3]]}}}],["3)中的a,ba,ba,b可缩小范围,并不用来实现全连接,此时a,ba,ba,b代表着卷积核的感受野,即kernel",{"_index":1501,"t":{"285":{"position":[[698,97]]}}}],["3.14159);//开辟一个存放单精度数的空间,并指定该实数的初值为//3.14159,将返回的该空间的地址赋给指针变量p",{"_index":819,"t":{"236":{"position":[[197,63]]}}}],["3.141590",{"_index":771,"t":{"231":{"position":[[139,9]]}}}],["3.1中的l1。如果w",{"_index":1790,"t":{"348":{"position":[[0,24]]}}}],["3.dequ",{"_index":836,"t":{"244":{"position":[[77,7]]}}}],["30",{"_index":650,"t":{"211":{"position":[[225,2]]}}}],["300000",{"_index":318,"t":{"95":{"position":[[1826,6]]},"112":{"position":[[770,6]]}}}],["30]的tensor",{"_index":564,"t":{"149":{"position":[[453,17]]}}}],["30]的tensor(包含所有预测框的坐标、置信度和类别结果),通过解析输出的tensor",{"_index":549,"t":{"149":{"position":[[99,51]]}}}],["32",{"_index":1124,"t":{"266":{"position":[[213,3]]}}}],["33×3卷积核,padding=1,stride=1padding=1",{"_index":1813,"t":{"350":{"position":[[100,36]]}}}],["33×3卷积的消融实验发现,7×77",{"_index":597,"t":{"163":{"position":[[96,18]]}}}],["35deg",{"_index":2155,"t":{"454":{"position":[[253,6]]}}}],["3][5,5,3",{"_index":1809,"t":{"350":{"position":[[26,12]]}}}],["3]图像,输出[7",{"_index":548,"t":{"149":{"position":[[85,10]]}}}],["3×33",{"_index":1812,"t":{"350":{"position":[[86,6]]}}}],["3×3×3×4=108(1)3",{"_index":1817,"t":{"352":{"position":[[119,15]]}}}],["3×3××3=27(2)3",{"_index":1821,"t":{"354":{"position":[[56,13]]}}}],["3个损失函数,使用梯度下降优化来求解线性回归模型。回想一下,更新梯度下降中的参数w",{"_index":1709,"t":{"340":{"position":[[678,57]]}}}],["3型文法,又称正规文法(regular",{"_index":173,"t":{"38":{"position":[[245,19]]}}}],["3科成绩(假设年级只有a班和b",{"_index":90,"t":{"11":{"position":[[225,40]]},"13":{"position":[[215,40]]},"397":{"position":[[211,40]]}}}],["4",{"_index":93,"t":{"11":{"position":[[298,2],[878,3],[1302,4],[1419,4]]},"13":{"position":[[288,2],[868,3],[1292,4],[1409,4]]},"70":{"position":[[0,11]]},"266":{"position":[[334,1],[404,1],[454,1]]},"269":{"position":
[[1825,3],[2127,3]]},"352":{"position":[[160,1]]},"356":{"position":[[105,1]]},"397":{"position":[[284,2],[864,3],[1284,4],[1401,4]]}}}],["4.stack",{"_index":840,"t":{"244":{"position":[[274,7]]}}}],["40px",{"_index":2164,"t":{"454":{"position":[[407,4]]}}}],["448",{"_index":547,"t":{"149":{"position":[[72,7],[80,4]]}}}],["4][5,5,4]的featur",{"_index":1811,"t":{"350":{"position":[[64,17]]}}}],["4个filter(输出通道为4),每个filter3个kernel(输入通道为3",{"_index":1815,"t":{"352":{"position":[[0,45]]}}}],["5",{"_index":136,"t":{"11":{"position":[[1106,2],[1288,2],[1299,2],[1377,2],[1416,2]]},"13":{"position":[[1096,2],[1278,2],[1289,2],[1367,2],[1406,2]]},"269":{"position":[[1842,3],[1859,3],[2145,3],[2163,3],[2181,3]]},"350":{"position":[[23,2],[61,2]]},"381":{"position":[[97,40]]},"383":{"position":[[773,1],[777,2],[2254,2]]},"397":{"position":[[1088,2],[1270,2],[1281,2],[1359,2],[1398,2]]}}}],["5.queue",{"_index":842,"t":{"244":{"position":[[334,7]]}}}],["6",{"_index":100,"t":{"11":{"position":[[419,3],[1109,3],[1291,3],[1295,3],[1380,4]]},"13":{"position":[[409,3],[1099,3],[1281,3],[1285,3],[1370,4]]},"112":{"position":[[1044,8]]},"269":{"position":[[1876,3],[1893,3]]},"273":{"position":[[2724,1]]},"383":{"position":[[571,2]]},"397":{"position":[[405,3],[1091,3],[1273,3],[1277,3],[1362,4]]},"409":{"position":[[366,1],[571,10],[595,1],[761,1]]}}}],["6.priority_queu",{"_index":844,"t":{"244":{"position":[[431,16]]}}}],["6层encod",{"_index":529,"t":{"142":{"position":[[16,11]]}}}],["7",{"_index":528,"t":{"142":{"position":[[0,15]]},"149":{"position":[[96,2],[450,2]]},"226":{"position":[[699,2]]},"269":{"position":[[1910,3],[1927,3]]},"332":{"position":[[1572,2],[1807,2]]},"340":{"position":[[950,2],[1191,2]]}}}],["7.14",{"_index":1297,"t":{"273":{"position":[[687,5]]}}}],["7.28",{"_index":1334,"t":{"273":{"position":[[1982,5],[2717,6]]}}}],["7.set",{"_index":846,"t":{"244":{"position":[[490,5]]}}}],["700",{"_index":2166,"t":{"454":{"position":[[428,4]]}}}],["77×7",{"_index":598,"t":{"163":{"position":[[122,11]]}}}],["77×7卷积与3×33",{"_index":596,"t":{"163":{"position":[[77,11]]}}}],["77×7卷积学习特征并降维,最后送入sigmoid",{"_index":594,"t":{"161":{"position":[[141,40]]}}}],["7×77",{"_index":595,"t":{"163":{"position":[[0,69]]}}}],["7个损失项是最终融合得到的featur",{"_index":534,"t":{"142":{"position":[[86,21]]}}}],["8",{"_index":307,"t":{"95":{"position":[[1666,1]]},"112":{"position":[[610,1]]},"309":{"position":[[0,10]]}}}],["8.multiset",{"_index":847,"t":{"244":{"position":[[514,10]]}}}],["84",{"_index":1939,"t":{"383":{"position":[[817,4]]}}}],["9",{"_index":108,"t":{"11":{"position":[[584,2],[587,2],[590,4]]},"13":{"position":[[574,2],[577,2],[580,4]]},"195":{"position":[[108,2]]},"332":{"position":[[1575,4],[1810,4]]},"397":{"position":[[570,2],[573,2],[576,4]]}}}],["9.map",{"_index":848,"t":{"244":{"position":[[543,5]]}}}],["9223372036854775807ll",{"_index":743,"t":{"226":{"position":[[559,22]]}}}],["92540646808111039ll",{"_index":746,"t":{"226":{"position":[[640,20]]}}}],["9999",{"_index":617,"t":{"195":{"position":[[83,5]]}}}],["9]])14label",{"_index":1728,"t":{"340":{"position":[[1194,11]]}}}],["9]])6predict",{"_index":1721,"t":{"340":{"position":[[953,12]]}}}],["__init__(self",{"_index":1916,"t":{"383":{"position":[[311,15],[447,15]]}}}],["__stack_chk_fail",{"_index":284,"t":{"95":{"position":[[1124,18]]}}}],["a(10,3",{"_index":1523,"t":{"291":{"position":[[12,8]]}}}],["a(4,3),b(3,4",{"_index":1527,"t":{"291":{"position":[[249,14]]}}}],["a)move(t,a",{"_index":182,"t":{"44":{"position":[[309,11]]}}}],["a,ba,ba,b",{"_index":149
5,"t":{"285":{"position":[[200,40]]}}}],["a,const",{"_index":812,"t":{"233":{"position":[[64,7]]}}}],["a.argmax(dim=0",{"_index":128,"t":{"11":{"position":[[910,16]]},"13":{"position":[[900,16]]},"397":{"position":[[896,16]]}}}],["a.argmax(dim=1",{"_index":129,"t":{"11":{"position":[[927,16]]},"13":{"position":[[917,16]]},"397":{"position":[[913,16]]}}}],["a.assign(b.begin",{"_index":1010,"t":{"250":{"position":[[1009,19]]}}}],["a.assign(n",{"_index":1008,"t":{"250":{"position":[[970,11]]}}}],["a.back",{"_index":998,"t":{"250":{"position":[[598,8]]},"291":{"position":[[150,9]]},"297":{"position":[[77,9]]},"303":{"position":[[57,9]]}}}],["a.begin",{"_index":987,"t":{"250":{"position":[[209,9]]}}}],["a.clear",{"_index":996,"t":{"250":{"position":[[555,9]]},"291":{"position":[[114,10]]},"295":{"position":[[67,10]]},"303":{"position":[[35,10]]}}}],["a.empti",{"_index":994,"t":{"250":{"position":[[390,9]]},"291":{"position":[[80,10]]},"295":{"position":[[46,10]]},"297":{"position":[[24,10]]},"303":{"position":[[24,10]]}}}],["a.end",{"_index":988,"t":{"250":{"position":[[238,7]]}}}],["a.erase(first",{"_index":1005,"t":{"250":{"position":[[882,14]]}}}],["a.erase(it",{"_index":1004,"t":{"250":{"position":[[852,11]]}}}],["a.erase({\"1\",1",{"_index":1558,"t":{"307":{"position":[[86,17]]}}}],["a.find({\"1\",1",{"_index":1559,"t":{"307":{"position":[[104,16]]}}}],["a.first",{"_index":1531,"t":{"293":{"position":[[33,10]]}}}],["a.front",{"_index":997,"t":{"250":{"position":[[579,9]]},"291":{"position":[[131,10]]},"297":{"position":[[56,10]]},"303":{"position":[[46,10]]}}}],["a.h)<(b.h",{"_index":814,"t":{"233":{"position":[[89,13]]}}}],["a.insert(it",{"_index":1003,"t":{"250":{"position":[[685,12],[727,12],[773,12]]}}}],["a.insert({\"1\",1",{"_index":1557,"t":{"307":{"position":[[67,18]]}}}],["a.merge(b",{"_index":1001,"t":{"250":{"position":[[646,10]]}}}],["a.pop",{"_index":1539,"t":{"297":{"position":[[97,8]]}}}],["a.pop_back",{"_index":991,"t":{"250":{"position":[[326,12]]},"291":{"position":[[196,13]]},"303":{"position":[[82,13]]}}}],["a.pop_front",{"_index":992,"t":{"250":{"position":[[349,13]]}}}],["a.push(1",{"_index":1538,"t":{"297":{"position":[[35,10]]}}}],["a.push_back",{"_index":1526,"t":{"291":{"position":[[169,14]]},"303":{"position":[[67,14]]}}}],["a.push_back(x",{"_index":990,"t":{"250":{"position":[[299,14]]}}}],["a.push_front(x",{"_index":989,"t":{"250":{"position":[[271,15]]}}}],["a.remove(x",{"_index":1006,"t":{"250":{"position":[[941,11]]}}}],["a.resize(n",{"_index":995,"t":{"250":{"position":[[418,11],[485,11]]}}}],["a.second",{"_index":1532,"t":{"293":{"position":[[55,11]]}}}],["a.siz",{"_index":993,"t":{"250":{"position":[[373,8]]},"291":{"position":[[48,9]]},"295":{"position":[[21,9]]},"297":{"position":[[14,9]]},"303":{"position":[[14,9]]}}}],["a.swap(v",{"_index":999,"t":{"250":{"position":[[616,9]]}}}],["a<0",{"_index":1326,"t":{"273":{"position":[[1630,4]]}}}],["a[\"2",{"_index":1556,"t":{"307":{"position":[[55,6]]}}}],["abcdef",{"_index":1363,"t":{"273":{"position":[[2729,6]]}}}],["acc",{"_index":1983,"t":{"383":{"position":[[1765,5],[1777,6],[2498,3],[2524,3]]},"394":{"position":[[233,63]]}}}],["acc=∑i(predi==yi)len(y)(1)acc",{"_index":2029,"t":{"394":{"position":[[128,29]]}}}],["accur",{"_index":1888,"t":{"369":{"position":[[321,8]]}}}],["accuraci",{"_index":1958,"t":{"383":{"position":[[1254,13]]},"394":{"position":[[313,8]]}}}],["accuracy(y_hat",{"_index":1961,"t":{"383":{"position":[[1301,15],[2111,15]]}}}],["accuracy作为数学上的训练方法,即在训练过程中不使用与acc",{"_index":2028,"t":{"394
":{"position":[[47,41]]}}}],["acc并无变化,出现梯度为0",{"_index":2034,"t":{"394":{"position":[[353,63]]}}}],["acc,但并不会将maxim",{"_index":2027,"t":{"394":{"position":[[0,46]]}}}],["aco",{"_index":732,"t":{"226":{"position":[[448,5]]}}}],["action=report",{"_index":331,"t":{"95":{"position":[[2066,13]]},"112":{"position":[[1010,13]]}}}],["acw",{"_index":1534,"t":{"295":{"position":[[11,9]]}}}],["addedge(int",{"_index":802,"t":{"231":{"position":[[787,11]]},"269":{"position":[[466,11]]},"271":{"position":[[662,11]]}}}],["adio",{"_index":367,"t":{"110":{"position":[[394,6],[1606,6]]}}}],["adjacent_find",{"_index":860,"t":{"246":{"position":[[62,13]]}}}],["adjacent)的等价(ident",{"_index":861,"t":{"246":{"position":[[76,32]]}}}],["adjlist",{"_index":1294,"t":{"273":{"position":[[636,7]]}}}],["adjlist[max_vertex_num",{"_index":1291,"t":{"273":{"position":[[555,24]]}}}],["adjv",{"_index":1286,"t":{"273":{"position":[[418,5],[2605,6]]}}}],["adjv=b;p",{"_index":1330,"t":{"273":{"position":[[1723,9]]}}}],["adjv]==0",{"_index":1357,"t":{"273":{"position":[[2577,10]]}}}],["ai",{"_index":2055,"t":{"404":{"position":[[0,21]]}}}],["ai论文】yolo",{"_index":544,"t":{"149":{"position":[[3,17]]}}}],["alexnet是指2012年由alex",{"_index":1895,"t":{"374":{"position":[[0,19]]}}}],["algorithm",{"_index":686,"t":{"222":{"position":[[42,9]]},"271":{"position":[[9,11]]}}}],["algraph",{"_index":1296,"t":{"273":{"position":[[650,9],[2816,7]]}}}],["all_ofc++11",{"_index":862,"t":{"246":{"position":[[109,11]]}}}],["all_proxi",{"_index":2100,"t":{"430":{"position":[[228,9]]}}}],["all_proxy=socks5://127.0.0.1:7890",{"_index":2095,"t":{"430":{"position":[[59,33]]}}}],["alpha",{"_index":150,"t":{"34":{"position":[[0,66],[116,6]]},"38":{"position":[[160,6]]}}}],["alpha,\\spac",{"_index":154,"t":{"34":{"position":[[102,13]]}}}],["alt",{"_index":665,"t":{"217":{"position":[[105,3],[125,3]]}}}],["anchor",{"_index":574,"t":{"151":{"position":[[22,7]]}}}],["anchor宽高比的聚类,聚类数越大,覆盖的i",{"_index":580,"t":{"151":{"position":[[150,43]]}}}],["anchor是通过k",{"_index":579,"t":{"151":{"position":[[115,10]]}}}],["anim",{"_index":1979,"t":{"383":{"position":[[1674,8]]}}}],["animator.add(epoch",{"_index":2006,"t":{"383":{"position":[[2291,18],[2416,18]]}}}],["announcementbar",{"_index":2149,"t":{"454":{"position":[[48,16],[71,16]]}}}],["any_ofc++11",{"_index":863,"t":{"246":{"position":[[144,11]]}}}],["append(),push_back",{"_index":1033,"t":{"252":{"position":[[728,20]]}}}],["applic",{"_index":2119,"t":{"439":{"position":[[41,11],[88,11]]}}}],["arch系用户通过以下命令即可完成bochs和nasm",{"_index":232,"t":{"93":{"position":[[55,31]]}}}],["arcnod",{"_index":1285,"t":{"273":{"position":[[405,8],[460,7],[475,9],[528,7],[1187,7],[1661,8],[2286,7]]}}}],["arcnum",{"_index":1300,"t":{"273":{"position":[[782,7],[1402,7],[1517,8]]}}}],["argmax",{"_index":110,"t":{"11":{"position":[[626,12],[946,6]]},"13":{"position":[[616,12],[936,6]]},"397":{"position":[[612,12],[932,6]]}}}],["argmin",{"_index":111,"t":{"11":{"position":[[641,6]]},"13":{"position":[[631,6]]},"397":{"position":[[627,6]]}}}],["arr_size(a",{"_index":720,"t":{"226":{"position":[[214,11]]}}}],["asciicod",{"_index":361,"t":{"110":{"position":[[272,9],[481,11],[1484,9],[1696,11]]}}}],["asciicode=='d')//ctrl+d",{"_index":365,"t":{"110":{"position":[[350,23],[1562,23]]}}}],["assert.h",{"_index":670,"t":{"220":{"position":[[21,8]]}}}],["assign",{"_index":1032,"t":{"252":{"position":[[690,10]]}}}],["attention应运而生,允许每个位置关注到序列中地所有其他位置。这种全局关联性质使得transform",{"_index":2069,"t":{"411":{"position":[[340,71]]}}}],["augment",{"_index":1902,"t"
:{"378":{"position":[[157,12]]}}}],["auto",{"_index":493,"t":{"126":{"position":[[1012,5]]}}}],["averag",{"_index":1886,"t":{"369":{"position":[[294,8]]}}}],["ax+b",{"_index":1778,"t":{"344":{"position":[[731,8]]}}}],["ax+bx",{"_index":1780,"t":{"344":{"position":[[792,5]]}}}],["a∈vn",{"_index":168,"t":{"38":{"position":[[109,5],[222,6]]}}}],["a与另一个list",{"_index":1000,"t":{"250":{"position":[[629,16]]}}}],["a中与范围b",{"_index":868,"t":{"246":{"position":[[271,23]]}}}],["a中所有值为x",{"_index":1007,"t":{"250":{"position":[[956,13]]}}}],["a中查找第一个与范围b",{"_index":877,"t":{"246":{"position":[[490,23]]}}}],["a中的所有元素替换成n个val",{"_index":1009,"t":{"250":{"position":[[990,18]]}}}],["a中第一个与范围b",{"_index":870,"t":{"246":{"position":[[309,26]]}}}],["a变成b",{"_index":1012,"t":{"250":{"position":[[1038,7]]}}}],["a是a班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6",{"_index":97,"t":{"11":{"position":[[339,47]]},"13":{"position":[[329,47]]},"397":{"position":[[325,47]]}}}],["a班4位同学,每位同学3",{"_index":84,"t":{"11":{"position":[[137,15]]},"13":{"position":[[127,15]]},"397":{"position":[[123,15]]}}}],["a而言,假设有一组互斥且穷尽的条件事件b,则事件a的概率等于事件a",{"_index":501,"t":{"134":{"position":[[194,65]]},"181":{"position":[[194,65]]}}}],["a,若存在方阵b使得ab=ba=单位方阵i,则方阵b为方阵a的逆矩阵,记为a−1a",{"_index":610,"t":{"179":{"position":[[69,52]]},"189":{"position":[[69,52]]}}}],["b",{"_index":85,"t":{"11":{"position":[[153,1],[212,3],[326,3],[530,1],[1085,1],[1124,2],[1266,1],[1318,2]]},"13":{"position":[[143,1],[202,3],[316,3],[520,1],[1075,1],[1114,2],[1256,1],[1308,2]]},"134":{"position":[[550,1],[613,1],[664,1]]},"181":{"position":[[550,1],[613,1],[664,1]]},"205":{"position":[[94,2],[122,3],[136,2]]},"236":{"position":[[125,2]]},"269":{"position":[[89,2],[99,3],[111,3]]},"271":{"position":[[91,2],[101,3],[113,3]]},"273":{"position":[[1601,1],[2780,1],[2787,2],[2892,2]]},"291":{"position":[[271,2]]},"338":{"position":[[113,2]]},"344":{"position":[[149,2]]},"362":{"position":[[48,2]]},"364":{"position":[[69,2],[250,2]]},"397":{"position":[[139,1],[198,3],[312,3],[516,1],[1067,1],[1106,2],[1248,1],[1300,2]]},"402":{"position":[[574,1],[628,1],[632,1]]}}}],["b){return",{"_index":813,"t":{"233":{"position":[[79,9]]}}}],["b)}{\\partial",{"_index":1852,"t":{"364":{"position":[[319,12]]}}}],["b,a",{"_index":1364,"t":{"273":{"position":[[2736,3]]}}}],["b,c,h,w][b",{"_index":585,"t":{"157":{"position":[[0,23],[117,29]]},"159":{"position":[[0,17]]},"161":{"position":[[0,17]]}}}],["b,d",{"_index":1365,"t":{"273":{"position":[[2740,3]]}}}],["b.end",{"_index":1011,"t":{"250":{"position":[[1029,8]]}}}],["b<0",{"_index":1327,"t":{"273":{"position":[[1638,4]]}}}],["b[i],e[i](0<=b[i]<=e[i]<=200000",{"_index":621,"t":{"203":{"position":[[103,56]]}}}],["b_t)}{\\partial",{"_index":1857,"t":{"364":{"position":[[481,14]]}}}],["background",{"_index":2154,"t":{"454":{"position":[[213,11]]}}}],["backward",{"_index":74,"t":{"9":{"position":[[278,13]]}}}],["base",{"_index":1269,"t":{"273":{"position":[[45,4],[58,4]]}}}],["bash的配置文件:~/.bashrc",{"_index":2105,"t":{"432":{"position":[[55,19]]}}}],["basic",{"_index":1870,"t":{"367":{"position":[[108,5]]}}}],["batch",{"_index":68,"t":{"9":{"position":[[140,5]]},"383":{"position":[[1910,6],[2224,6],[2265,5],[2312,6]]}}}],["batch_siz",{"_index":2017,"t":{"383":{"position":[[2639,10]]}}}],["batchsize越小,收敛效果越好。随机梯度下降理论上带来了噪音,batchs",{"_index":71,"t":{"9":{"position":[[164,68]]}}}],["batch上计算损失函数以及梯度,近似损失。此时,batchs",{"_index":65,"t":{"9":{"position":[[0,96]]}}}],["batch中有大量样本均存在这种情况,此时acc有显著提升而网络的权重的更新极小,此时,与acc有关的loss",{"_index":2039,"t":{"394":{"p
osition":[[489,129]]}}}],["batch数据,即mini",{"_index":67,"t":{"9":{"position":[[126,13]]}}}],["batteri",{"_index":2132,"t":{"439":{"position":[[201,7]]}}}],["bbb",{"_index":1844,"t":{"362":{"position":[[181,10]]},"402":{"position":[[178,38]]}}}],["bbox与其他所有置信度更小的bbox做iou判断,若iou大于设置的阈值,则抹除置信度小的bbox",{"_index":569,"t":{"149":{"position":[[619,59]]}}}],["bbox包含(x",{"_index":557,"t":{"149":{"position":[[289,11]]}}}],["bbox的置信度与其父grid",{"_index":567,"t":{"149":{"position":[[547,17]]}}}],["bbox都会在loss",{"_index":571,"t":{"149":{"position":[[737,19]]}}}],["bc",{"_index":2169,"t":{"460":{"position":[[0,14]]}}}],["be",{"_index":240,"t":{"95":{"position":[[330,5]]},"369":{"position":[[209,5]]}}}],["beg,end]内所有字符作为字符串",{"_index":1031,"t":{"252":{"position":[[649,26]]}}}],["begin",{"_index":632,"t":{"205":{"position":[[303,6],[337,7],[357,6],[406,6]]}}}],["begin(),end",{"_index":1049,"t":{"252":{"position":[[1135,13]]}}}],["begin[i]+1",{"_index":624,"t":{"203":{"position":[[288,37]]}}}],["begin[i],end[i](0<=begin[i]<=end[i]<=200000",{"_index":622,"t":{"203":{"position":[[189,66]]}}}],["begin{cas",{"_index":31,"t":{"3":{"position":[[462,13],[598,13]]},"5":{"position":[[454,13],[590,13]]},"19":{"position":[[462,13],[598,13]]},"21":{"position":[[454,13],[590,13]]},"327":{"position":[[24,13],[160,13]]}}}],["behind",{"_index":1872,"t":{"367":{"position":[[119,6]]}}}],["beta",{"_index":162,"t":{"36":{"position":[[43,5],[82,16]]},"38":{"position":[[183,5]]}}}],["beta,\\spac",{"_index":161,"t":{"36":{"position":[[30,12]]},"38":{"position":[[147,12]]}}}],["better",{"_index":2137,"t":{"439":{"position":[[310,6]]}}}],["bfs(int",{"_index":1163,"t":{"269":{"position":[[727,7]]},"276":{"position":[[1078,7]]},"278":{"position":[[780,7]]}}}],["bfs(rt",{"_index":1455,"t":{"276":{"position":[[2002,8]]},"278":{"position":[[1617,8]]}}}],["bia",{"_index":1689,"t":{"338":{"position":[[116,9]]}}}],["big",{"_index":1851,"t":{"364":{"position":[[291,5]]},"437":{"position":[[101,3]]}}}],["big(l(w",{"_index":1850,"t":{"364":{"position":[[240,9]]}}}],["binari",{"_index":62,"t":{"7":{"position":[[303,6]]},"23":{"position":[[303,6]]}}}],["binary_search",{"_index":920,"t":{"246":{"position":[[1908,13]]}}}],["bit",{"_index":694,"t":{"222":{"position":[[152,4]]}}}],["bitbit",{"_index":1123,"t":{"266":{"position":[[65,6],[101,6]]}}}],["bits/stdc++.h",{"_index":481,"t":{"126":{"position":[[49,15]]},"195":{"position":[[9,15]]},"205":{"position":[[9,15]]},"211":{"position":[[9,15]]}}}],["bitset",{"_index":693,"t":{"222":{"position":[[145,6]]},"266":{"position":[[0,10],[26,13]]},"289":{"position":[[294,9]]}}}],["bitset<10000",{"_index":1561,"t":{"309":{"position":[[11,13]]}}}],["bitset>n>>m",{"_index":1215,"t":{"271":{"position":[[1086,10]]}}}],["cin>>row",{"_index":821,"t":{"238":{"position":[[29,9]]}}}],["cin>>row>>col",{"_index":827,"t":{"240":{"position":[[0,14]]}}}],["cin>>src>>dst",{"_index":1219,"t":{"271":{"position":[[1140,14]]}}}],["ci×h×wc_i",{"_index":1572,"t":{"321":{"position":[[0,12]]}}}],["class",{"_index":64,"t":{"7":{"position":[[322,5]]},"23":{"position":[[322,5]]},"383":{"position":[[276,5],[418,5]]}}}],["classifi",{"_index":2053,"t":{"402":{"position":[[656,10]]}}}],["clear",{"_index":1036,"t":{"252":{"position":[[781,7]]}}}],["clock",{"_index":2138,"t":{"439":{"position":[[324,5]]}}}],["closure(t)\\epsilon",{"_index":183,"t":{"44":{"position":[[321,22]]}}}],["closure(t)ϵ−closure(t",{"_index":184,"t":{"44":{"position":[[345,22]]}}}],["cloud",{"_index":465,"t":{"118":{"position":[[157,7]]}}}],["cnn",{"_index":2064,"t":{"4
11":{"position":[[0,23]]}}}],["cnn中没有全连接层时,本质上可以接受任意尺寸的输入,但这是狭隘的。若考虑其下游任务以及输出,如fcn(fulli",{"_index":2058,"t":{"409":{"position":[[124,68]]}}}],["cnn使用卷积层通过滑动卷积核在输入上进行局部感受野的操作。每个神经元只与输入的一小部分区域相连,这意味着每个神经元只能接触到局部的上下文信息。这样的设计使得cnn",{"_index":2065,"t":{"411":{"position":[[24,111]]}}}],["cnn在面临长输入序列时不能很好地综合上下文信息、提取位置信息,因此self",{"_index":2068,"t":{"411":{"position":[[288,51]]}}}],["cnn本质上可以接受任意通道数的图像输入,但是其模型效果将会受到极大的影响。以一个使用通道数为3的数据集进行训练的cnn",{"_index":2061,"t":{"409":{"position":[[278,83]]}}}],["cnn模型的输入向量的形状是固定的,其输出向量的形状也是固定的或可以根据不同的下游任务而唯一确定,即输入形状与下游任务共同确定了一个cnn",{"_index":2057,"t":{"409":{"position":[[0,84]]}}}],["cnn的参数共享使得模型能够学习到图像中的局部特征,这也是一种对于上下文的假设。相邻位置上的权重共享使得模型能够对局部结构进行建模,并且这种权重共享使得cnn",{"_index":2066,"t":{"411":{"position":[[142,89]]}}}],["cnn的设计理念认为:在图像任务中,局部结构通常更为重要,局部连接和权值共享使得cnn",{"_index":2067,"t":{"411":{"position":[[232,55]]}}}],["cnt",{"_index":1376,"t":{"273":{"position":[[2811,4],[2985,6]]}}}],["cnt0−1w<0\\frac{d|w|}{d",{"_index":1700,"t":{"340":{"position":[[441,30]]}}}],["e",{"_index":5,"t":{"3":{"position":[[70,3]]},"5":{"position":[[62,3]]},"19":{"position":[[70,3]]},"21":{"position":[[62,3]]},"205":{"position":[[97,2],[126,4],[147,1]]},"217":{"position":[[64,1]]},"273":{"position":[[307,2],[325,3],[952,2],[957,12],[1028,2]]},"325":{"position":[[39,3]]},"344":{"position":[[808,37]]},"430":{"position":[[181,1],[279,1]]}}}],["e,a",{"_index":1371,"t":{"273":{"position":[[2764,3]]}}}],["each",{"_index":1878,"t":{"367":{"position":[[285,4]]},"369":{"position":[[3,4],[199,4],[267,4]]}}}],["echo",{"_index":441,"t":{"110":{"position":[[2179,4]]},"430":{"position":[[175,4],[273,4]]}}}],["edit",{"_index":297,"t":{"95":{"position":[[1468,4]]},"112":{"position":[[412,4]]}}}],["elf_i386",{"_index":281,"t":{"95":{"position":[[1088,8]]}}}],["empti",{"_index":1041,"t":{"252":{"position":[[920,7]]}}}],["enabled=0",{"_index":320,"t":{"95":{"position":[[1840,9],[1868,9],[1895,9]]},"112":{"position":[[784,9],[812,9],[839,9]]}}}],["encod",{"_index":523,"t":{"140":{"position":[[192,19]]}}}],["encoder阶段,每个block之后使用maxpool",{"_index":520,"t":{"140":{"position":[[41,37]]}}}],["encrypt",{"_index":609,"t":{"172":{"position":[[22,10]]},"174":{"position":[[33,10],[77,10]]},"176":{"position":[[33,10],[77,10]]}}}],["end",{"_index":633,"t":{"205":{"position":[[310,4],[345,6],[372,3],[418,4]]},"244":{"position":[[164,5]]},"305":{"position":[[136,22]]},"402":{"position":[[639,3]]}}}],["end[i",{"_index":623,"t":{"203":{"position":[[269,18]]}}}],["endif",{"_index":1274,"t":{"273":{"position":[[203,6]]}}}],["endl",{"_index":496,"t":{"126":{"position":[[1088,5]]},"195":{"position":[[193,5]]},"211":{"position":[[352,5],[391,5]]},"248":{"position":[[1646,5]]},"269":{"position":[[1700,5],[2046,5],[2341,5]]},"295":{"position":[[122,5]]}}}],["end{aligned}\\right",{"_index":1748,"t":{"340":{"position":[[1828,20]]}}}],["end{aligned}l​=(y^​−y)2=(wx+b−y)2",{"_index":1696,"t":{"340":{"position":[[193,35]]}}}],["end{aligned}wnew",{"_index":1737,"t":{"340":{"position":[[1430,17],[1849,17],[2119,17]]}}}],["end{align}h(p",{"_index":1648,"t":{"332":{"position":[[1313,15]]}}}],["end{align}h(p)​=−i∑n​pi",{"_index":1615,"t":{"332":{"position":[[411,25]]}}}],["end{array}\\right.dwd∣w∣​={1−1​w>0w<0",{"_index":1704,"t":{"340":{"position":[[520,38]]}}}],["end{array}\\right.wnew",{"_index":1764,"t":{"342":{"position":[[266,22]]}}}],["end{cas",{"_index":33,"t":{"3":{"position":[[503,11],[639,11]]},"5":{"position":[[495,11],[631,11]]},"19":{"position":[[503,11],[639,11]]},"21":{"position":[[495,11],[631,11]]},"327":{"position":[[65,
11],[201,11]]}}}],["entri",{"_index":377,"t":{"110":{"position":[[654,5],[1737,5]]}}}],["entropi",{"_index":60,"t":{"7":{"position":[[284,7]]},"23":{"position":[[284,7]]}}}],["entropy),是描述两个概率分布p和q",{"_index":1621,"t":{"332":{"position":[[539,29]]}}}],["entropy中的entropi",{"_index":1598,"t":{"332":{"position":[[11,50]]}}}],["entrpoy",{"_index":1603,"t":{"332":{"position":[[97,7]]}}}],["enumerate(train_it",{"_index":1992,"t":{"383":{"position":[[1927,22]]}}}],["enum{dg",{"_index":1282,"t":{"273":{"position":[[365,8]]}}}],["ep",{"_index":734,"t":{"226":{"position":[[467,3]]}}}],["epoch",{"_index":1990,"t":{"383":{"position":[[1878,5]]},"424":{"position":[[313,8]]}}}],["equal",{"_index":865,"t":{"246":{"position":[[224,5]]}}}],["equal_rang",{"_index":921,"t":{"246":{"position":[[1942,11]]}}}],["eras",{"_index":1035,"t":{"252":{"position":[[768,7]]}}}],["errno.h",{"_index":672,"t":{"220":{"position":[[50,7]]}}}],["error",{"_index":47,"t":{"7":{"position":[[13,5]]},"23":{"position":[[13,5]]},"95":{"position":[[347,6]]},"273":{"position":[[115,5],[1329,6],[1392,6],[1650,6]]}}}],["estim",{"_index":1889,"t":{"369":{"position":[[330,8]]}}}],["eta",{"_index":1712,"t":{"340":{"position":[[783,4],[1363,4],[1405,4],[1608,4],[1654,4],[1745,4],[1789,4],[2036,4],[2082,4]]},"364":{"position":[[435,4],[454,4]]}}}],["euclidean",{"_index":1678,"t":{"336":{"position":[[232,9]]}}}],["evalu",{"_index":1867,"t":{"367":{"position":[[67,8]]}}}],["evaluate_accuracy_gpu(net",{"_index":1943,"t":{"383":{"position":[[900,26],[2378,26]]}}}],["exactli",{"_index":1883,"t":{"369":{"position":[[220,7]]}}}],["exampl",{"_index":294,"t":{"95":{"position":[[1426,7]]},"112":{"position":[[370,7]]},"254":{"position":[[90,30]]}}}],["examples/sec",{"_index":2014,"t":{"383":{"position":[[2596,12]]}}}],["excit",{"_index":602,"t":{"167":{"position":[[12,10]]}}}],["excitation激励操作就是通过sigmoid",{"_index":606,"t":{"167":{"position":[[153,42]]}}}],["execut",{"_index":261,"t":{"95":{"position":[[788,7]]}}}],["exit",{"_index":350,"t":{"110":{"position":[[109,4],[1321,4]]}}}],["exit(0",{"_index":448,"t":{"110":{"position":[[2343,8]]}}}],["exit(1",{"_index":369,"t":{"110":{"position":[[426,8],[1641,8]]}}}],["exit(overflow",{"_index":1329,"t":{"273":{"position":[[1705,15]]}}}],["export",{"_index":2094,"t":{"430":{"position":[[52,6],[93,6],[133,6]]}}}],["extra_c_opt",{"_index":290,"t":{"95":{"position":[[1271,15],[1341,15]]}}}],["f",{"_index":19,"t":{"3":{"position":[[313,1],[717,1]]},"5":{"position":[[305,1],[709,1]]},"19":{"position":[[313,1],[717,1]]},"21":{"position":[[305,1],[709,1]]},"266":{"position":[[313,7]]},"280":{"position":[[237,2],[573,3]]},"325":{"position":[[290,1]]},"327":{"position":[[279,1]]},"383":{"position":[[94,1]]}}}],["f(x",{"_index":55,"t":{"7":{"position":[[200,4]]},"23":{"position":[[200,4]]},"330":{"position":[[176,4]]}}}],["f(x)={0x<0xx≥0(1)f(x",{"_index":30,"t":{"3":{"position":[[438,21]]},"5":{"position":[[430,21]]},"19":{"position":[[438,21]]},"21":{"position":[[430,21]]}}}],["f(x)={0x<0xx≥0(3)f(x",{"_index":1590,"t":{"327":{"position":[[0,21]]}}}],["f(x)]^2",{"_index":51,"t":{"7":{"position":[[126,9],[234,9]]},"23":{"position":[[126,9],[234,9]]},"330":{"position":[[102,9],[210,9]]}}}],["f,a",{"_index":1372,"t":{"273":{"position":[[2768,3]]}}}],["f,b",{"_index":1373,"t":{"273":{"position":[[2772,3]]}}}],["f,e",{"_index":1374,"t":{"273":{"position":[[2776,3]]}}}],["f.relu(x",{"_index":39,"t":{"3":{"position":[[766,9]]},"5":{"position":[[758,9]]},"19":{"position":[[766,9]]},"21":{"position":[[758,9]]},"327":{"position":
[[328,9]]}}}],["f.sigmoid(x",{"_index":24,"t":{"3":{"position":[[362,12]]},"5":{"position":[[354,12]]},"19":{"position":[[362,12]]},"21":{"position":[[354,12]]},"325":{"position":[[339,12]]}}}],["f1",{"_index":666,"t":{"217":{"position":[[131,2],[146,2]]}}}],["f10",{"_index":668,"t":{"217":{"position":[[269,3]]}}}],["f11",{"_index":669,"t":{"217":{"position":[[280,3]]}}}],["f12",{"_index":664,"t":{"217":{"position":[[73,3],[89,3]]}}}],["f5",{"_index":667,"t":{"217":{"position":[[245,2],[260,2]]}}}],["f=0",{"_index":1485,"t":{"280":{"position":[[623,4]]}}}],["f=1",{"_index":1491,"t":{"280":{"position":[[801,4]]}}}],["fals",{"_index":807,"t":{"231":{"position":[[862,7]]},"269":{"position":[[540,6],[1327,6],[1753,7],[2087,7]]},"271":{"position":[[736,6],[2257,20]]},"273":{"position":[[86,5]]},"454":{"position":[[141,6]]}}}],["fa根据当前的状态及扫描的输入字符,便能唯一地知道fa",{"_index":176,"t":{"44":{"position":[[24,49]]}}}],["fcn",{"_index":582,"t":{"153":{"position":[[234,9]]}}}],["file",{"_index":229,"t":{"93":{"position":[[11,6]]},"95":{"position":[[1228,5],[1443,5]]},"110":{"position":[[986,4]]},"112":{"position":[[387,5]]}}}],["file=/usr/local/share/bochs/bio",{"_index":305,"t":{"95":{"position":[[1597,32]]},"112":{"position":[[541,32]]}}}],["file=/usr/local/share/bochs/vgabio",{"_index":301,"t":{"95":{"position":[[1522,35]]},"112":{"position":[[466,35]]}}}],["filesystem",{"_index":418,"t":{"110":{"position":[[1790,12]]}}}],["fill",{"_index":882,"t":{"246":{"position":[[694,4]]}}}],["fill_n",{"_index":883,"t":{"246":{"position":[[714,6]]}}}],["filter都对输入图像的所有通道完成一次卷积,filter中的kernel",{"_index":1816,"t":{"352":{"position":[[46,59]]}}}],["final",{"_index":1894,"t":{"371":{"position":[[109,5]]}}}],["find",{"_index":866,"t":{"246":{"position":[[241,4]]}}}],["find_end",{"_index":867,"t":{"246":{"position":[[262,8]]}}}],["find_first_of",{"_index":869,"t":{"246":{"position":[[295,13]]}}}],["find_if",{"_index":871,"t":{"246":{"position":[[336,7]]}}}],["find_if_notc++11",{"_index":872,"t":{"246":{"position":[[360,16]]}}}],["findallpath(algraph",{"_index":1340,"t":{"273":{"position":[[2220,19]]}}}],["findallpath(g",{"_index":1358,"t":{"273":{"position":[[2588,14],[3068,14]]}}}],["finder小组件中appl",{"_index":2113,"t":{"437":{"position":[[273,21]]}}}],["finder栏中plasmoid",{"_index":2116,"t":{"439":{"position":[[0,29]]}}}],["find,拷贝copy,删除erase,替换replace,插入insert",{"_index":1017,"t":{"252":{"position":[[287,41]]}}}],["first",{"_index":759,"t":{"229":{"position":[[24,6]]},"231":{"position":[[280,6]]},"248":{"position":[[765,6]]},"250":{"position":[[786,6]]},"276":{"position":[[178,6]]},"278":{"position":[[178,6]]}}}],["first(该非终结符)减去ϵ\\epsilonϵ的所有终结符元素都加入至follow",{"_index":188,"t":{"51":{"position":[[154,52]]}}}],["first=1",{"_index":1448,"t":{"276":{"position":[[1745,8]]},"278":{"position":[[1410,8]]}}}],["firstarc",{"_index":1290,"t":{"273":{"position":[[536,10]]}}}],["first和last",{"_index":986,"t":{"250":{"position":[[150,47]]}}}],["first和last所指定的序列[first",{"_index":963,"t":{"248":{"position":[[781,28],[884,29],[1177,33]]},"250":{"position":[[802,28],[905,29]]}}}],["first集、follow集是针对于符号串而言的,而select",{"_index":189,"t":{"53":{"position":[[3,49]]}}}],["fish的配置文件:~/.config/fish/config.fish",{"_index":2103,"t":{"432":{"position":[[0,36]]}}}],["flag",{"_index":285,"t":{"95":{"position":[[1200,5]]},"211":{"position":[[93,4],[236,4],[322,5]]}}}],["flip",{"_index":1567,"t":{"309":{"position":[[183,7]]}}}],["float",{"_index":764,"t":{"229":{"position":[[59,5]]},"236":{"position":[[178,5],[191,5]]}}}],["float.h",{"_index":673,"t":{"22
0":{"position":[[65,7]]}}}],["floppya",{"_index":309,"t":{"95":{"position":[[1676,8],[1714,9]]},"112":{"position":[[620,8],[658,9]]}}}],["fno",{"_index":291,"t":{"95":{"position":[[1321,3]]}}}],["focal",{"_index":599,"t":{"165":{"position":[[0,5]]}}}],["fold",{"_index":1862,"t":{"367":{"position":[[2,4],[128,4],[198,6],[268,4]]},"369":{"position":[[23,4],[61,5],[122,5],[204,4]]}}}],["follow",{"_index":186,"t":{"51":{"position":[[3,20],[24,29],[129,24]]}}}],["follow集加入到该非终结符的follow",{"_index":187,"t":{"51":{"position":[[88,40]]}}}],["follow集解决的话则是slr(1",{"_index":207,"t":{"75":{"position":[[221,29]]}}}],["font",{"_index":2165,"t":{"454":{"position":[[415,4]]}}}],["for(i,f_start,f_end",{"_index":717,"t":{"226":{"position":[[141,20]]}}}],["for(int",{"_index":718,"t":{"226":{"position":[[162,7]]},"269":{"position":[[901,7],[1334,7],[1981,7],[2270,7]]},"271":{"position":[[1828,7],[1956,7]]},"276":{"position":[[1275,7],[1754,7],[1806,7],[1855,7]]},"278":{"position":[[977,7],[1419,7],[1463,7],[1504,7]]},"280":{"position":[[690,7],[732,7]]}}}],["for_each",{"_index":873,"t":{"246":{"position":[[394,8]]}}}],["fork",{"_index":2128,"t":{"439":{"position":[[179,6]]}}}],["forward",{"_index":72,"t":{"9":{"position":[[233,12]]}}}],["forward(self",{"_index":1919,"t":{"383":{"position":[[368,13],[859,13]]}}}],["found",{"_index":1182,"t":{"269":{"position":[[1266,6],[1319,5],[1551,5],[1579,8]]}}}],["four",{"_index":765,"t":{"229":{"position":[[65,5]]}}}],["four:0.25",{"_index":780,"t":{"231":{"position":[[291,9]]}}}],["frac{1}{1",{"_index":4,"t":{"3":{"position":[[57,10]]},"5":{"position":[[49,10]]},"19":{"position":[[57,10]]},"21":{"position":[[49,10]]},"325":{"position":[[26,10]]}}}],["frac{\\lambda}{2",{"_index":1847,"t":{"364":{"position":[[74,17],[255,17]]}}}],["frac{\\parti",{"_index":1713,"t":{"340":{"position":[[788,14],[1368,14],[1613,14],[2041,14]]},"364":{"position":[[299,14],[459,14]]}}}],["frac{\\sum{i(pred_i==y_i)}}{len(i",{"_index":2030,"t":{"394":{"position":[[160,35]]}}}],["frac{d|w|}{d",{"_index":1744,"t":{"340":{"position":[[1691,13]]}}}],["frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j",{"_index":43,"t":{"3":{"position":[[849,37]]},"5":{"position":[[841,37]]},"19":{"position":[[849,37]]},"21":{"position":[[841,37]]}}}],["frac{p(b|a",{"_index":503,"t":{"134":{"position":[[469,12]]},"181":{"position":[[469,12]]}}}],["frac{shape_{input",{"_index":1508,"t":{"287":{"position":[[343,19]]},"321":{"position":[[273,19]]}}}],["free",{"_index":166,"t":{"38":{"position":[[54,4]]},"110":{"position":[[895,4]]}}}],["freebsd",{"_index":267,"t":{"95":{"position":[[891,8]]}}}],["front",{"_index":1514,"t":{"289":{"position":[[72,8]]}}}],["function",{"_index":18,"t":{"3":{"position":[[299,10],[703,10]]},"5":{"position":[[291,10],[695,10]]},"19":{"position":[[299,10],[703,10]]},"21":{"position":[[291,10],[695,10]]},"325":{"position":[[276,10]]},"327":{"position":[[265,10]]},"383":{"position":[[80,10]]},"430":{"position":[[31,8],[200,8]]}}}],["function中起到更新的作用,因此不进行nm",{"_index":572,"t":{"149":{"position":[[757,25]]}}}],["g",{"_index":1166,"t":{"269":{"position":[[774,2],[1153,2]]},"271":{"position":[[1070,2]]},"273":{"position":[[938,2],[2240,2],[2824,2]]}}}],["g(8",{"_index":806,"t":{"231":{"position":[[857,4]]},"269":{"position":[[1748,4]]}}}],["g.addedge(0",{"_index":1188,"t":{"269":{"position":[[1761,12],[1778,12]]}}}],["g.addedge(1",{"_index":1189,"t":{"269":{"position":[[1795,12]]}}}],["g.addedge(3",{"_index":1190,"t":{"269":{"position":[[1812,12],[1829,12]]}}}],["g.addedge(4",{"_index":1191,"t":{"269":{"position":[
[1846,12],[1863,12]]}}}],["g.addedge(5",{"_index":1192,"t":{"269":{"position":[[1880,12],[1897,12]]}}}],["g.addedge(6",{"_index":1193,"t":{"269":{"position":[[1914,12]]}}}],["g.addedge(src,dst",{"_index":1220,"t":{"271":{"position":[[1155,19]]}}}],["g.bfs(0",{"_index":1195,"t":{"269":{"position":[[1948,9]]}}}],["g.erase(g.begin",{"_index":1172,"t":{"269":{"position":[[862,19]]}}}],["g.pop_back",{"_index":1187,"t":{"269":{"position":[[1672,13]]}}}],["g.push_back(id1",{"_index":1177,"t":{"269":{"position":[[1026,17],[1459,17]]}}}],["g.push_back(start",{"_index":1168,"t":{"269":{"position":[[782,19],[1161,19]]}}}],["g.vers[i].data",{"_index":1309,"t":{"273":{"position":[[1009,15]]}}}],["g.vers[path[i]].data",{"_index":1351,"t":{"273":{"position":[[2429,21]]}}}],["g.vertexs.begin",{"_index":1222,"t":{"271":{"position":[[1207,18]]}}}],["g.vertexs[u].connectors.clear();//清空u",{"_index":1252,"t":{"271":{"position":[[2130,42]]}}}],["g1(6",{"_index":1198,"t":{"269":{"position":[[2081,5]]}}}],["g1.addedge(0",{"_index":1199,"t":{"269":{"position":[[2095,13],[2113,13],[2131,13]]}}}],["g1.addedge(1",{"_index":1200,"t":{"269":{"position":[[2149,13]]}}}],["g1.addedge(2",{"_index":1203,"t":{"269":{"position":[[2221,13]]}}}],["g1.addedge(4",{"_index":1201,"t":{"269":{"position":[[2167,13]]}}}],["g1.addedge(5",{"_index":1202,"t":{"269":{"position":[[2185,13],[2203,13]]}}}],["g1.dfs(0",{"_index":1205,"t":{"269":{"position":[[2259,10]]}}}],["g=graph(n",{"_index":1216,"t":{"271":{"position":[[1097,10]]}}}],["g[0",{"_index":1171,"t":{"269":{"position":[[856,5]]}}}],["g[g.size",{"_index":1183,"t":{"269":{"position":[[1304,10],[1600,10]]}}}],["gcc",{"_index":251,"t":{"95":{"position":[[654,3],[973,3]]}}}],["geeko",{"_index":212,"t":{"85":{"position":[[110,12],[123,7],[206,25],[270,16]]},"87":{"position":[[70,15]]},"89":{"position":[[0,18]]},"93":{"position":[[0,10],[34,14]]},"95":{"position":[[34,8],[360,8],[584,8],[1149,8],[1371,7],[2132,15]]},"110":{"position":[[0,8],[640,6]]},"112":{"position":[[0,16]]}}}],["geekos!\\n",{"_index":438,"t":{"110":{"position":[[2093,12]]}}}],["geekos/bootinfo.h",{"_index":403,"t":{"110":{"position":[[1014,19]]}}}],["geekos/crc32.h",{"_index":407,"t":{"110":{"position":[[1121,16]]}}}],["geekos/int.h",{"_index":409,"t":{"110":{"position":[[1171,14]]}}}],["geekos/keyboard.h",{"_index":413,"t":{"110":{"position":[[1274,19]]}}}],["geekos/kthread.h",{"_index":410,"t":{"110":{"position":[[1195,18]]}}}],["geekos/mem.h",{"_index":406,"t":{"110":{"position":[[1097,14]]}}}],["geekos/screen.h",{"_index":405,"t":{"110":{"position":[[1070,17]]}}}],["geekos/string.h",{"_index":404,"t":{"110":{"position":[[1043,17]]}}}],["geekos/timer.h",{"_index":412,"t":{"110":{"position":[[1248,16]]}}}],["geekos/trap.h",{"_index":411,"t":{"110":{"position":[[1223,15]]}}}],["geekos/tss.h",{"_index":408,"t":{"110":{"position":[[1147,14]]}}}],["geekos中makefil",{"_index":238,"t":{"95":{"position":[[193,23],[238,19]]}}}],["geekos是一个基于x86",{"_index":209,"t":{"85":{"position":[[0,27]]}}}],["geekos环境的配置,下面我们来验证环境配置的成功与否以及project",{"_index":345,"t":{"108":{"position":[[0,49]]}}}],["geekos设计的7",{"_index":215,"t":{"85":{"position":[[174,20]]}}}],["geekos运行依托于boch",{"_index":222,"t":{"87":{"position":[[21,17]]}}}],["gener",{"_index":268,"t":{"95":{"position":[[907,9]]},"246":{"position":[[742,8]]}}}],["general_opt",{"_index":245,"t":{"95":{"position":[[490,15],[541,15],[1246,12],[1295,12]]}}}],["generate_n",{"_index":884,"t":{"246":{"position":[[786,10]]}}}],["geq",{"_index":32,"t":{"3":{"position":[[494,4],[630,4]]},"5":
{"position":[[486,4],[622,4]]},"19":{"position":[[494,4],[630,4]]},"21":{"position":[[486,4],[622,4]]},"327":{"position":[[56,4],[192,4]]}}}],["getheight(int",{"_index":1422,"t":{"276":{"position":[[970,13]]},"278":{"position":[[680,13]]}}}],["getlin",{"_index":1044,"t":{"252":{"position":[[1007,12]]}}}],["global",{"_index":2125,"t":{"439":{"position":[[136,6]]}}}],["global_step",{"_index":2082,"t":{"424":{"position":[[232,14]]},"426":{"position":[[198,14]]}}}],["gnu",{"_index":273,"t":{"95":{"position":[[999,3]]}}}],["gradient",{"_index":69,"t":{"9":{"position":[[146,8]]},"394":{"position":[[297,8],[417,8]]},"454":{"position":[[242,9]]}}}],["grammar,cfg",{"_index":167,"t":{"38":{"position":[[59,12]]}}}],["grammar,rg",{"_index":174,"t":{"38":{"position":[[265,11]]}}}],["graph",{"_index":790,"t":{"231":{"position":[[524,15],[555,5],[718,7],[844,6],[851,5]]},"269":{"position":[[258,15],[289,5],[1722,6],[1742,5],[2075,5]]},"271":{"position":[[399,15],[430,5],[593,7],[1057,6],[1064,5]]}}}],["graph(int",{"_index":795,"t":{"231":{"position":[[642,9]]},"269":{"position":[[376,9]]},"271":{"position":[[517,9]]}}}],["graphkind",{"_index":1284,"t":{"273":{"position":[[379,10],[601,9]]}}}],["grate",{"_index":454,"t":{"118":{"position":[[3,8],[61,8],[112,8]]}}}],["gray",{"_index":439,"t":{"110":{"position":[[2137,7]]}}}],["green|bright",{"_index":436,"t":{"110":{"position":[[2059,15]]}}}],["grid",{"_index":553,"t":{"149":{"position":[[206,6],[368,8],[471,6]]}}}],["h",{"_index":382,"t":{"110":{"position":[[703,2]]},"149":{"position":[[304,2]]},"157":{"position":[[27,2],[150,2],[193,2]]},"159":{"position":[[21,2]]},"161":{"position":[[21,2],[94,2]]},"167":{"position":[[53,2]]},"233":{"position":[[26,3]]},"273":{"position":[[1551,1],[1597,3]]},"321":{"position":[[20,1],[155,2],[442,1],[460,2]]},"342":{"position":[[224,2],[362,2]]},"344":{"position":[[396,5]]},"444":{"position":[[54,1]]}}}],["h(p",{"_index":1611,"t":{"332":{"position":[[312,4],[1157,4],[1213,4],[1231,4]]}}}],["h(p)=−∑inpi",{"_index":1608,"t":{"332":{"position":[[259,11]]}}}],["h)+\\lambda",{"_index":1763,"t":{"342":{"position":[[248,11]]}}}],["h,t",{"_index":1313,"t":{"273":{"position":[[1182,4]]}}}],["h=2x(wx+b−y)h=2",{"_index":1756,"t":{"342":{"position":[[49,15]]}}}],["hash",{"_index":851,"t":{"244":{"position":[[609,19],[646,19],[678,19],[715,19]]}}}],["hat{i",{"_index":1693,"t":{"340":{"position":[[156,10]]}}}],["heap",{"_index":1541,"t":{"299":{"position":[[30,5]]}}}],["heap.clear",{"_index":1542,"t":{"299":{"position":[[36,13]]}}}],["heap.empti",{"_index":1544,"t":{"299":{"position":[[63,13]]}}}],["heap.push",{"_index":1545,"t":{"299":{"position":[[107,10]]}}}],["heap.siz",{"_index":1543,"t":{"299":{"position":[[50,12]]}}}],["higher",{"_index":1602,"t":{"332":{"position":[[90,6],[132,6]]}}}],["hinton提出的一种卷积神经网络模型,它主要应用于图像分类任务。在当时,alexnet的表现远远超过了其他参赛的网络模型,并且在imagenet",{"_index":1898,"t":{"374":{"position":[[55,83]]}}}],["hit",{"_index":351,"t":{"110":{"position":[[114,3],[1326,3]]}}}],["hollings@cs.umd.edu",{"_index":389,"t":{"110":{"position":[[783,21]]}}}],["hollingsworth",{"_index":388,"t":{"110":{"position":[[769,13]]}}}],["host",{"_index":258,"t":{"95":{"position":[[735,4],[805,4]]}}}],["host_cc",{"_index":271,"t":{"95":{"position":[[962,7]]}}}],["hovemey",{"_index":383,"t":{"110":{"position":[[706,9]]}}}],["http://127.0.0.1:7890",{"_index":1912,"t":{"383":{"position":[[199,23]]}}}],["http_proxi",{"_index":2101,"t":{"430":{"position":[[244,10]]}}}],["http_proxy=http://127.0.0.1:7890",{"_index":2096,"t":{"430":{"position":[[100,32
]]}}}],["https://127.0.0.1:7890",{"_index":1914,"t":{"383":{"position":[[251,24]]}}}],["https://blog.csdn.net/f_zyj/article/details/51594851",{"_index":832,"t":{"242":{"position":[[4,52]]}}}],["https://download.csdn.net/download/f_zyj/9988653",{"_index":833,"t":{"242":{"position":[[57,48]]}}}],["https://towardsdatascience.com/intuit",{"_index":1670,"t":{"334":{"position":[[108,46]]}}}],["https://www.bilibili.com/video/bv12u411s7us/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":198,"t":{"70":{"position":[[284,107]]}}}],["https://www.bilibili.com/video/bv13r4y1m7sq/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":205,"t":{"70":{"position":[[732,107]]}}}],["https://www.bilibili.com/video/bv1564y1e7b9/?spm_id_from=333.999.0.0&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":2167,"t":{"456":{"position":[[28,116]]}}}],["https://www.bilibili.com/video/bv1pl4y1e7re/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":195,"t":{"70":{"position":[[121,107]]}}}],["https://www.bilibili.com/video/bv1vm4y1q7xb/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":201,"t":{"70":{"position":[[458,107]]}}}],["https_proxi",{"_index":2102,"t":{"430":{"position":[[261,11]]}}}],["https_proxy=https://127.0.0.1:7890",{"_index":2097,"t":{"430":{"position":[[140,34]]}}}],["hwnew",{"_index":1759,"t":{"342":{"position":[[134,5]]}}}],["hw×cihw",{"_index":1569,"t":{"319":{"position":[[51,15]]}}}],["hyperparamet",{"_index":1893,"t":{"371":{"position":[[80,14]]}}}],["h′=h+n−1n(1)h'=\\frac{h+n",{"_index":340,"t":{"101":{"position":[[275,24]]}}}],["h′以及w′h'以及w'h′以及w",{"_index":1581,"t":{"321":{"position":[[177,24]]}}}],["h外,w",{"_index":1782,"t":{"346":{"position":[[0,24]]}}}],["h给我们一个w",{"_index":1771,"t":{"344":{"position":[[74,38]]}}}],["i+1",{"_index":1390,"t":{"273":{"position":[[3182,4]]}}}],["i,a,b",{"_index":1311,"t":{"273":{"position":[[1144,6]]}}}],["i,j",{"_index":1375,"t":{"273":{"position":[[2802,4]]}}}],["i,ji,ji,j代表输出神经元的二维索引坐标,h,wh,wh,w",{"_index":1492,"t":{"285":{"position":[[27,49]]}}}],["i/o",{"_index":682,"t":{"220":{"position":[[188,5]]}}}],["i440fxsupport",{"_index":322,"t":{"95":{"position":[[1880,14]]},"112":{"position":[[824,14]]}}}],["i<0",{"_index":1317,"t":{"273":{"position":[[1316,5],[1379,5]]}}}],["i<=k",{"_index":1348,"t":{"273":{"position":[[2386,5]]}}}],["ir2",{"_index":1474,"t":{"280":{"position":[[352,9]]}}}],["if(la>ra",{"_index":1407,"t":{"276":{"position":[[365,9],[707,9]]},"278":{"position":[[266,9],[495,9]]}}}],["if(n==0||m==0",{"_index":1256,"t":{"271":{"position":[[2300,15]]}}}],["if(num==n",{"_index":1254,"t":{"271":{"position":[[2196,10]]}}}],["if(read_key(&keycod",{"_index":356,"t":{"110":{"position":[[161,22],[1373,22]]}}}],["if(rt",{"_index":1483,"t":{"280":{"position":[[560,7]]}}}],["if(rt==0",{"_index":1423,"t":{"276":{"position":[[990,9],[1380,9],[1495,9],[1611,9]]},"278":{"position":[[700,9],[1061,9],[1168,9],[1276,9]]}}}],["if(t[w].l!=0",{"_index":1428,"t":{"276":{"position":[[1195,13]]},"278":{"position":[[897,13]]}}}],["if(t[w].r!=0",{"_index":1430,"t":{"276":{"position":[[1225,13]]},"278":{"position":[[927,13]]}}}],["if(topologicalsort",{"_index":1262,"t":{"271":{"position":[[2423,22]]}}}],["ifm",{"_index":2157,"t":{"454":{"position":[[266,3],[300,3],[339,3],[379,3]]}}}],["ifndef",{"_index":1268,"t":{"273":{"position":[[37,7]]}}}],["image数据,在传入visdom时仍需要先转化为numpi",{"_index":2085,"t":{"424":{"position":[[355,35]]}}}],["import",{"_index":17,"t":{"3":{"position":[[292,6],[
315,6],[696,6],[719,6]]},"5":{"position":[[284,6],[307,6],[688,6],[711,6]]},"19":{"position":[[292,6],[315,6],[696,6],[719,6]]},"21":{"position":[[284,6],[307,6],[688,6],[711,6]]},"325":{"position":[[269,6],[292,6]]},"327":{"position":[[258,6],[281,6]]},"332":{"position":[[1474,6]]},"383":{"position":[[0,6],[13,6],[43,6],[73,6],[105,6],[141,6],[162,6]]},"424":{"position":[[12,6]]},"426":{"position":[[12,6]]}}}],["in[maxn",{"_index":1466,"t":{"280":{"position":[[255,9]]}}}],["includ",{"_index":402,"t":{"110":{"position":[[1005,8],[1034,8],[1061,8],[1088,8],[1112,8],[1138,8],[1162,8],[1186,8],[1214,8],[1239,8],[1265,8]]},"126":{"position":[[40,8]]},"195":{"position":[[0,8]]},"205":{"position":[[0,8]]},"211":{"position":[[0,8]]},"246":{"position":[[2062,8]]},"248":{"position":[[1443,8],[1463,8]]},"269":{"position":[[0,8],[20,8],[38,8]]},"271":{"position":[[0,8],[21,8],[41,8],[59,8]]},"307":{"position":[[0,8]]}}}],["include::iter",{"_index":1071,"t":{"256":{"position":[[765,14]]}}}],["int[col",{"_index":831,"t":{"240":{"position":[[84,9]]}}}],["int[nrow",{"_index":823,"t":{"238":{"position":[[77,8]]}}}],["intuit",{"_index":1769,"t":{"344":{"position":[[50,9],[139,9]]}}}],["intut",{"_index":1774,"t":{"344":{"position":[[384,8],[558,8]]}}}],["ios::sync_with_stdio(fals",{"_index":641,"t":{"207":{"position":[[346,28]]}}}],["iostream",{"_index":976,"t":{"248":{"position":[[1452,10]]},"269":{"position":[[9,10]]},"271":{"position":[[30,10]]}}}],["is_heap",{"_index":930,"t":{"246":{"position":[[2236,7]]}}}],["is_heap_untilc++11",{"_index":931,"t":{"246":{"position":[[2258,18]]}}}],["is_list",{"_index":1232,"t":{"271":{"position":[[1508,10]]}}}],["is_partitionedc++11",{"_index":907,"t":{"246":{"position":[[1542,19]]}}}],["is_permutationc++11",{"_index":936,"t":{"246":{"position":[[2397,19]]}}}],["is_sorted_untilc++11",{"_index":914,"t":{"246":{"position":[[1749,20]]}}}],["is_sortedc++11",{"_index":913,"t":{"246":{"position":[[1722,14]]}}}],["is_tre",{"_index":1236,"t":{"271":{"position":[[1656,10]]}}}],["isclos",{"_index":2151,"t":{"454":{"position":[[128,12]]}}}],["isdag",{"_index":794,"t":{"231":{"position":[[624,6],[660,6]]},"269":{"position":[[358,6],[394,6],[550,7]]},"271":{"position":[[499,6],[535,6],[746,7]]}}}],["isdag(1",{"_index":800,"t":{"231":{"position":[[741,8]]},"271":{"position":[[616,8]]}}}],["isdag(isdag",{"_index":797,"t":{"231":{"position":[[682,12]]},"269":{"position":[[416,12]]},"271":{"position":[[557,12]]}}}],["isinstance(net",{"_index":1946,"t":{"383":{"position":[[955,15]]}}}],["isinstance(x",{"_index":1953,"t":{"383":{"position":[[1114,13]]}}}],["it'",{"_index":1599,"t":{"332":{"position":[[62,4]]}}}],["iter",{"_index":689,"t":{"222":{"position":[[78,8]]},"246":{"position":[[835,22]]},"369":{"position":[[8,9],[272,9]]}}}],["iter_swap",{"_index":885,"t":{"246":{"position":[[825,9]]}}}],["it指向的元素前插入n个新元素val",{"_index":962,"t":{"248":{"position":[[729,22]]},"250":{"position":[[750,22]]}}}],["it指向的元素前插入新元素val",{"_index":961,"t":{"248":{"position":[[685,20]]},"250":{"position":[[706,20]]}}}],["iulian",{"_index":391,"t":{"110":{"position":[[827,6]]}}}],["j",{"_index":660,"t":{"217":{"position":[[7,1],[111,1]]},"269":{"position":[[1989,1],[1996,1],[2011,4]]},"273":{"position":[[2924,1],[3086,2]]}}}],["jeffrey",{"_index":386,"t":{"110":{"position":[[758,7]]}}}],["k",{"_index":387,"t":{"110":{"position":[[766,2]]},"217":{"position":[[161,2],[188,2]]},"273":{"position":[[2259,2]]},"367":{"position":[[0,1],[126,1]]},"369":{"position":[[21,1]]},"460":{"position":[[173,15]]}}}],["k+1",{"_i
ndex":1359,"t":{"273":{"position":[[2615,5]]}}}],["k_h",{"_index":1576,"t":{"321":{"position":[[73,3]]}}}],["k_wco​×ci​×kh​×kw",{"_index":1577,"t":{"321":{"position":[[84,18]]}}}],["kernel",{"_index":330,"t":{"95":{"position":[[2050,7]]},"110":{"position":[[1723,6],[1764,6],[2162,6]]},"112":{"position":[[994,7]]}}}],["kernel_size=5",{"_index":1928,"t":{"383":{"position":[[574,14],[673,15]]}}}],["kernel_thread",{"_index":373,"t":{"110":{"position":[[542,13],[2224,13]]}}}],["key",{"_index":443,"t":{"110":{"position":[[2192,4]]},"256":{"position":[[63,4],[920,31]]}}}],["key]操作是map很有特色的操作,如果在map中存在键值为key",{"_index":1066,"t":{"256":{"position":[[371,39]]}}}],["key_ctrl_flag)==key_ctrl_flag",{"_index":364,"t":{"110":{"position":[[317,29],[1529,29]]}}}],["key_release_flag",{"_index":359,"t":{"110":{"position":[[234,21],[1446,21]]}}}],["key_special_flag",{"_index":358,"t":{"110":{"position":[[202,17],[1414,17]]}}}],["keyboard_serial_delay",{"_index":315,"t":{"95":{"position":[[1776,22]]},"112":{"position":[[720,22]]}}}],["keycod",{"_index":354,"t":{"110":{"position":[[133,7],[141,8],[223,8],[284,7],[1345,7],[1353,8],[1435,8],[1496,7]]}}}],["key的元素对,值域为默认值。所以可以用该操作向map",{"_index":1067,"t":{"256":{"position":[[411,74]]}}}],["key部分作为标识,map中所有元素的key值必须是唯一的,multimap则允许有重复的key",{"_index":1061,"t":{"256":{"position":[[90,50]]}}}],["key(当另一个元素是整形时,m[key]=0",{"_index":1070,"t":{"256":{"position":[[694,58]]}}}],["kh=kw=1k_h=k_w=1kh​=kw​=1",{"_index":1568,"t":{"319":{"position":[[0,50]]}}}],["kind",{"_index":1292,"t":{"273":{"position":[[611,5],[1277,5]]}}}],["kkk",{"_index":1875,"t":{"367":{"position":[[179,3],[239,3]]},"369":{"position":[[57,3],[183,3]]}}}],["kl",{"_index":1618,"t":{"332":{"position":[[473,5],[479,15],[689,11]]}}}],["known",{"_index":1676,"t":{"336":{"position":[[51,5],[212,5]]}}}],["kpple",{"_index":2117,"t":{"439":{"position":[[30,5]]}}}],["krizhevsky、ilya",{"_index":1896,"t":{"374":{"position":[[20,15]]}}}],["kruskal",{"_index":499,"t":{"131":{"position":[[90,9]]}}}],["kullback–leibl",{"_index":1619,"t":{"332":{"position":[[495,16]]}}}],["k−1k",{"_index":1880,"t":{"369":{"position":[[112,4]]}}}],["k个变为v",{"_index":1565,"t":{"309":{"position":[[152,7]]}}}],["k的结点是u(第k+1个是u",{"_index":1346,"t":{"273":{"position":[[2336,21]]}}}],["l",{"_index":208,"t":{"77":{"position":[[84,1]]},"79":{"position":[[76,1]]},"248":{"position":[[1527,2]]},"340":{"position":[[154,1]]},"383":{"position":[[2035,1]]}}}],["l(w",{"_index":1835,"t":{"362":{"position":[[43,4]]},"364":{"position":[[314,4]]}}}],["l(w,b",{"_index":1832,"t":{"362":{"position":[[4,6],[124,6]]}}}],["l(w,b)+λ2∥w∥12(2)l(w",{"_index":1846,"t":{"364":{"position":[[47,21]]}}}],["l(w_t",{"_index":1856,"t":{"364":{"position":[[474,6]]}}}],["l(y,z)=max(0,−y∗z)(1)l(y,z)=max(0",{"_index":2044,"t":{"402":{"position":[[366,34]]}}}],["l,r",{"_index":1396,"t":{"276":{"position":[[203,4]]},"278":{"position":[[203,4]]}}}],["l,r,d",{"_index":1467,"t":{"280":{"position":[[283,6]]}}}],["l.backward",{"_index":1998,"t":{"383":{"position":[[2054,12]]}}}],["l.push_back(x",{"_index":978,"t":{"248":{"position":[[1555,15]]}}}],["l.size",{"_index":979,"t":{"248":{"position":[[1586,8]]}}}],["l1",{"_index":1478,"t":{"280":{"position":[[433,3]]},"334":{"position":[[158,2]]},"336":{"position":[[60,2]]},"340":{"position":[[254,13],[268,27]]},"342":{"position":[[146,3]]},"346":{"position":[[68,4]]}}}],["l1,int",{"_index":1469,"t":{"280":{"position":[[315,6]]}}}],["l1:l1:l1",{"_index":1739,"t":{"340":{"position":[[1478,9]]}}}],["l1=(wx+b−y)2+λ∣w∣l_{1}=(w",{"_index":1697,"t":
{"340":{"position":[[296,25]]}}}],["l1具有将权重推向0的影响,而l2没有,但这并不意味着由于l2的权重不能达到或者接近0",{"_index":1807,"t":{"348":{"position":[[580,46]]}}}],["l1和l2",{"_index":1669,"t":{"334":{"position":[[3,29],[259,36],[296,96]]},"338":{"position":[[0,53]]}}}],["l1和l2正则化分别归因于向量w的l1和l2",{"_index":1674,"t":{"336":{"position":[[0,37]]}}}],["l1和l2正则化的效果,让我们使用3",{"_index":1690,"t":{"340":{"position":[[0,45]]}}}],["l1完全减少了模型中的特征数量。以下是l1",{"_index":1794,"t":{"348":{"position":[[134,47]]}}}],["l1正则化(硬性限制)、l2",{"_index":1830,"t":{"360":{"position":[[200,35]]}}}],["l1正则化会使得一部分参数变为0,从而实现特征选择的效果;l2正则化则会使得模型参数尽量接近0",{"_index":1831,"t":{"360":{"position":[[236,93]]}}}],["l1正则化限制权重参数的l1",{"_index":1843,"t":{"362":{"position":[[154,26]]}}}],["l1的权重更新会受到第一点的影响,但来自l2",{"_index":1788,"t":{"346":{"position":[[151,78]]}}}],["l1范数进行正则化的线性回归模型称为lasso",{"_index":1681,"t":{"336":{"position":[[622,25]]}}}],["l2",{"_index":1671,"t":{"334":{"position":[[165,2]]},"336":{"position":[[221,2],[667,44],[854,15]]},"340":{"position":[[559,13]]},"346":{"position":[[73,3],[100,4],[146,4]]}}}],["l2,int",{"_index":1471,"t":{"280":{"position":[[329,6]]}}}],["l2:l2:l2",{"_index":1750,"t":{"340":{"position":[[1954,9]]},"342":{"position":[[317,9]]}}}],["l2=(wx+b−y)2+λw2l_{2}=(w",{"_index":1706,"t":{"340":{"position":[[596,24]]}}}],["l2正则化是指在模型的损失函数中,加入对模型参数的l2",{"_index":1845,"t":{"364":{"position":[[0,46]]}}}],["l2正则化项添加到l",{"_index":1705,"t":{"340":{"position":[[573,22]]}}}],["l2范数是对元素求平方和后再开根号,需要.pow(2",{"_index":48,"t":{"7":{"position":[[24,36]]},"23":{"position":[[24,36]]},"330":{"position":[[0,36]]}}}],["l:l:l",{"_index":1732,"t":{"340":{"position":[[1289,6]]},"342":{"position":[[99,6]]}}}],["l=(y^−y)2=(wx+b−y)2\\begin{align",{"_index":1692,"t":{"340":{"position":[[119,34]]}}}],["l[i",{"_index":980,"t":{"248":{"position":[[1623,4]]}}}],["l_{1}}{\\partial",{"_index":1741,"t":{"340":{"position":[[1628,15]]}}}],["l_{2}}{\\partial",{"_index":1752,"t":{"340":{"position":[[2056,15]]}}}],["la",{"_index":1413,"t":{"276":{"position":[[477,3],[820,3]]},"278":{"position":[[346,3],[575,3]]}}}],["la,int",{"_index":1399,"t":{"276":{"position":[[253,6],[651,6]]},"278":{"position":[[239,6],[468,6]]}}}],["la,ra",{"_index":1403,"t":{"276":{"position":[[281,12],[679,12]]}}}],["label",{"_index":1662,"t":{"332":{"position":[[1632,5],[1678,6],[1815,5],[1864,6]]}}}],["label)16",{"_index":1730,"t":{"340":{"position":[[1250,9]]}}}],["label)9",{"_index":1724,"t":{"340":{"position":[[1056,8]]}}}],["lalr(1",{"_index":203,"t":{"70":{"position":[[620,10]]}}}],["lambda",{"_index":1747,"t":{"340":{"position":[[1813,8],[2108,7]]},"342":{"position":[[227,8],[367,7]]},"364":{"position":[[137,23],[337,7],[556,7]]}}}],["lambda)w_t",{"_index":1855,"t":{"364":{"position":[[440,11]]}}}],["last",{"_index":951,"t":{"248":{"position":[[370,6],[422,10],[772,5],[876,7],[914,5],[1168,5],[1225,16]]},"250":{"position":[[144,5],[793,5],[897,7],[935,5]]}}}],["last)插入到迭代器it",{"_index":964,"t":{"248":{"position":[[810,20]]},"250":{"position":[[831,20]]}}}],["last),[first",{"_index":974,"t":{"248":{"position":[[1211,13]]}}}],["later",{"_index":253,"t":{"95":{"position":[[668,5]]}}}],["latest",{"_index":303,"t":{"95":{"position":[[1563,6],[1636,6]]},"112":{"position":[[507,6],[580,6]]}}}],["lb,int",{"_index":1401,"t":{"276":{"position":[[267,6],[665,6]]},"278":{"position":[[253,6],[482,6]]}}}],["lb,rb",{"_index":1404,"t":{"276":{"position":[[294,12],[692,12]]}}}],["ld",{"_index":274,"t":{"95":{"position":[[1003,2]]}}}],["learn",{"_index":1866,"t":{"367":{"position":[[55,8]]}}}],["lecun等人于1998年提出的卷积神经
网络结构,该结构由卷积层、池化层和全连接层组成,可以高效地处理手写数字图像,并在mnist",{"_index":1904,"t":{"381":{"position":[[12,78]]}}}],["left\\{\\begin{align",{"_index":1746,"t":{"340":{"position":[[1718,24]]}}}],["left\\{\\begin{array}{l",{"_index":1762,"t":{"342":{"position":[[193,27]]}}}],["legend=[\"curve_name_1",{"_index":2088,"t":{"426":{"position":[[110,23]]}}}],["legend=['train",{"_index":1982,"t":{"383":{"position":[[1736,14]]}}}],["len",{"_index":487,"t":{"126":{"position":[[828,3],[949,3]]}}}],["len(train_it",{"_index":1987,"t":{"383":{"position":[[1818,15]]}}}],["len==1,则la==ra",{"_index":1405,"t":{"276":{"position":[[312,41]]}}}],["len=v.siz",{"_index":1432,"t":{"276":{"position":[[1261,13]]},"278":{"position":[[963,13]]}}}],["lenet",{"_index":1905,"t":{"381":{"position":[[91,5]]},"383":{"position":[[2745,5]]}}}],["lenet5",{"_index":2020,"t":{"383":{"position":[[2753,8]]}}}],["lenet5(nn.modul",{"_index":1922,"t":{"383":{"position":[[424,18]]}}}],["lenetreshap",{"_index":1926,"t":{"383":{"position":[[526,15]]}}}],["lenetreshape(nn.modul",{"_index":1915,"t":{"383":{"position":[[282,24]]}}}],["lenet是由yann",{"_index":1903,"t":{"381":{"position":[[0,11]]}}}],["leq",{"_index":1839,"t":{"362":{"position":[[101,4]]}}}],["less",{"_index":1604,"t":{"332":{"position":[[111,4]]}}}],["lexicographical_compar",{"_index":937,"t":{"246":{"position":[[2437,23]]}}}],["lgorithm",{"_index":859,"t":{"246":{"position":[[0,12]]}}}],["lgpl",{"_index":302,"t":{"95":{"position":[[1558,4]]},"112":{"position":[[502,4]]}}}],["lighter",{"_index":2160,"t":{"454":{"position":[[284,9],[318,8]]}}}],["lightest",{"_index":2162,"t":{"454":{"position":[[357,9],[397,9]]}}}],["limits.h",{"_index":674,"t":{"220":{"position":[[78,8]]}}}],["line",{"_index":298,"t":{"95":{"position":[[1479,5]]},"112":{"position":[[423,5]]}}}],["linear",{"_index":27,"t":{"3":{"position":[[419,6]]},"5":{"position":[[411,6]]},"19":{"position":[[419,6]]},"21":{"position":[[411,6]]},"454":{"position":[[235,6]]}}}],["linker",{"_index":272,"t":{"95":{"position":[[991,7]]}}}],["linux",{"_index":266,"t":{"95":{"position":[[881,5]]},"450":{"position":[[3,15]]}}}],["linux操作系统后需要安装bochs以及nasm",{"_index":223,"t":{"87":{"position":[[39,30]]}}}],["linux自带的编译环境以及编译命令对特定的geeko",{"_index":224,"t":{"89":{"position":[[47,30]]}}}],["list",{"_index":105,"t":{"11":{"position":[[540,16]]},"13":{"position":[[530,16]]},"222":{"position":[[110,4]]},"250":{"position":[[0,8],[9,23],[198,10]]},"383":{"position":[[1128,6]]},"397":{"position":[[526,16]]}}}],["list.end",{"_index":655,"t":{"213":{"position":[[58,12],[138,13]]}}}],["lista(first",{"_index":985,"t":{"250":{"position":[[126,17]]}}}],["lista(n",{"_index":982,"t":{"250":{"position":[[51,13],[87,13]]}}}],["lista{1,2,3",{"_index":981,"t":{"250":{"position":[[33,17]]}}}],["list和tensor",{"_index":101,"t":{"11":{"position":[[423,17]]},"13":{"position":[[413,17]]},"397":{"position":[[409,17]]}}}],["list和vector",{"_index":839,"t":{"244":{"position":[[228,45]]}}}],["list或deque实现,封闭头部即可,不用vector",{"_index":841,"t":{"244":{"position":[[282,51],[342,51]]}}}],["list的*乘法是复制元素,改变list的shap",{"_index":102,"t":{"11":{"position":[[441,26]]},"13":{"position":[[431,26]]},"397":{"position":[[427,26]]}}}],["ll",{"_index":726,"t":{"226":{"position":[[360,2],[547,2],[633,2],[667,2]]},"276":{"position":[[118,3]]},"278":{"position":[[118,3]]},"280":{"position":[[204,3]]}}}],["locale.h",{"_index":675,"t":{"220":{"position":[[92,8]]}}}],["locatevex_al(*pg",{"_index":1325,"t":{"273":{"position":[[1579,17],[1605,17]]}}}],["locatevex_al(algraph",{"_index":1307,"t":{"273":{"pos
ition":[[917,20]]}}}],["locatevex_al(g",{"_index":1379,"t":{"273":{"position":[[2899,15],[2928,15]]}}}],["log",{"_index":313,"t":{"95":{"position":[[1757,4]]},"112":{"position":[[701,4]]}}}],["log2(1pi)(7)\\begin{align",{"_index":1610,"t":{"332":{"position":[[286,25]]}}}],["log2(pi)=∑inpi",{"_index":1609,"t":{"332":{"position":[[271,14]]}}}],["log2(pi)−log2(qi)](9)d_{kl}(p",{"_index":1632,"t":{"332":{"position":[[842,30]]}}}],["log2(qi)(10)\\begin{align",{"_index":1644,"t":{"332":{"position":[[1187,25]]}}}],["log2​(pi​)=i∑n​pi",{"_index":1616,"t":{"332":{"position":[[437,18]]}}}],["log2​(pi​)−log2​(qi​)](9",{"_index":1636,"t":{"332":{"position":[[959,26]]}}}],["log2​(pi​1​)​(7",{"_index":1617,"t":{"332":{"position":[[456,16]]}}}],["log2​(qi​)​(10",{"_index":1650,"t":{"332":{"position":[[1357,15]]}}}],["log_2(p_i",{"_index":1634,"t":{"332":{"position":[[910,11]]}}}],["log_2(q_i)]}\\tag{9}dkl​(p",{"_index":1635,"t":{"332":{"position":[[922,25]]}}}],["logist",{"_index":2,"t":{"3":{"position":[[20,10]]},"5":{"position":[[12,10]]},"19":{"position":[[20,10]]},"21":{"position":[[12,10]]},"388":{"position":[[0,8]]}}}],["long",{"_index":466,"t":{"120":{"position":[[3,17],[21,32]]},"226":{"position":[[363,4],[368,4],[394,4],[399,4]]},"266":{"position":[[1010,4]]},"276":{"position":[[108,4],[113,4]]},"278":{"position":[[108,4],[113,4]]},"280":{"position":[[194,4],[199,4]]}}}],["loss",{"_index":61,"t":{"7":{"position":[[292,4]]},"23":{"position":[[292,4]]},"142":{"position":[[129,4]]},"165":{"position":[[6,43],[87,14]]},"383":{"position":[[1623,4],[1751,6]]},"424":{"position":[[280,8]]}}}],["loss(y_hat",{"_index":1997,"t":{"383":{"position":[[2039,11]]}}}],["loss.to(devic",{"_index":1978,"t":{"383":{"position":[[1658,15]]}}}],["lossmse=∑[y−f(x)]2(1)loss_{ms",{"_index":49,"t":{"7":{"position":[[81,31]]},"23":{"position":[[81,31]]}}}],["lossmse=∑[y−f(x)]2(5)loss_{ms",{"_index":1594,"t":{"330":{"position":[[57,31]]}}}],["loss得到6",{"_index":533,"t":{"142":{"position":[[74,11]]}}}],["lot",{"_index":328,"t":{"95":{"position":[[1992,3]]},"112":{"position":[[936,3]]}}}],["lower_bound",{"_index":922,"t":{"246":{"position":[[1975,11]]}}}],["lr",{"_index":191,"t":{"65":{"position":[[29,9]]},"383":{"position":[[1414,3],[2720,3],[2810,3]]}}}],["lr(0",{"_index":194,"t":{"70":{"position":[[98,8]]},"75":{"position":[[138,6],[159,13]]}}}],["lr(1",{"_index":200,"t":{"70":{"position":[[435,8]]},"75":{"position":[[153,5]]}}}],["lr=lr",{"_index":1976,"t":{"383":{"position":[[1616,6]]}}}],["lru",{"_index":339,"t":{"101":{"position":[[233,25]]}}}],["lu",{"_index":612,"t":{"179":{"position":[[804,57]]},"191":{"position":[[676,57]]}}}],["l}{\\partial",{"_index":1714,"t":{"340":{"position":[[803,11],[1383,11]]}}}],["l定义为平方误差,其中误差是i",{"_index":1691,"t":{"340":{"position":[[76,22]]}}}],["m",{"_index":280,"t":{"95":{"position":[[1086,1]]},"250":{"position":[[101,2]]},"256":{"position":[[323,2]]},"422":{"position":[[52,1]]}}}],["m.clear",{"_index":1078,"t":{"256":{"position":[[1053,10]]}}}],["m.empti",{"_index":1077,"t":{"256":{"position":[[1032,10]]}}}],["m.erase(it",{"_index":1075,"t":{"256":{"position":[[952,12]]}}}],["m.erase(key",{"_index":1074,"t":{"256":{"position":[[903,13]]}}}],["m.find(key",{"_index":1072,"t":{"256":{"position":[[785,12]]}}}],["m.insert(make_pair(key",{"_index":1068,"t":{"256":{"position":[[486,23]]}}}],["m.size",{"_index":1076,"t":{"256":{"position":[[1012,9]]}}}],["m32",{"_index":257,"t":{"95":{"position":[[729,3],[978,3]]}}}],["m[key",{"_index":1064,"t":{"256":{"position":[[352,6],[683,7]]}}}],["machin
",{"_index":1865,"t":{"367":{"position":[[47,7]]}}}],["magnitud",{"_index":1786,"t":{"346":{"position":[[77,9]]}}}],["main",{"_index":371,"t":{"110":{"position":[[503,31]]},"126":{"position":[[769,6]]},"195":{"position":[[50,6]]},"205":{"position":[[50,6]]},"211":{"position":[[50,6]]},"248":{"position":[[1506,6]]},"269":{"position":[[1733,6]]},"271":{"position":[[2284,7]]},"273":{"position":[[2708,6]]},"276":{"position":[[1704,6]]},"278":{"position":[[1369,6]]},"280":{"position":[[658,6]]}}}],["main(struct",{"_index":422,"t":{"110":{"position":[[1838,11]]}}}],["make",{"_index":219,"t":{"85":{"position":[[338,7]]},"89":{"position":[[144,7]]},"95":{"position":[[147,19]]},"112":{"position":[[70,4],[82,4]]}}}],["make_heap",{"_index":932,"t":{"246":{"position":[[2296,9]]}}}],["malloc(sizeof(arcnod",{"_index":1328,"t":{"273":{"position":[[1670,26]]}}}],["map",{"_index":690,"t":{"222":{"position":[[99,3]]},"256":{"position":[[0,7],[223,68],[292,13],[326,9],[339,9]]},"289":{"position":[[173,4]]},"307":{"position":[[9,5]]},"350":{"position":[[82,3]]},"356":{"position":[[43,21]]}}}],["map、<=、>=、==、!=,其规则是先比较first,first相等时再比较second",{"_index":1057,"t":{"254":{"position":[[246,110]]}}}],["pair也能进行sort",{"_index":1533,"t":{"293":{"position":[[81,12]]}}}],["pair对象外,如果需要即时生成一个pair对象,也可以调用在其中定义的一个模版函数:make_pair。make_pair",{"_index":1058,"t":{"254":{"position":[[357,92]]}}}],["pair模版类需要两个参数:首元素的数据类型和尾元素的数据类型。pair模版类对象有两个成员:first和second",{"_index":1056,"t":{"254":{"position":[[173,72]]}}}],["paramet",{"_index":1787,"t":{"346":{"position":[[136,9]]},"371":{"position":[[37,9]]}}}],["partial_sort",{"_index":916,"t":{"246":{"position":[[1822,12]]}}}],["partial_sort_copi",{"_index":917,"t":{"246":{"position":[[1840,17]]}}}],["partit",{"_index":909,"t":{"246":{"position":[[1590,9]]},"367":{"position":[[183,11]]}}}],["partition_copyc++11",{"_index":910,"t":{"246":{"position":[[1611,19]]}}}],["partition_pointc++11",{"_index":911,"t":{"246":{"position":[[1643,20]]}}}],["path",{"_index":1349,"t":{"273":{"position":[[2397,11]]}}}],["path/terminal_proxy.sh",{"_index":2106,"t":{"432":{"position":[[96,23]]}}}],["path[k]=0",{"_index":1361,"t":{"273":{"position":[[2691,10]]}}}],["path[k]=u",{"_index":1345,"t":{"273":{"position":[[2317,10]]}}}],["path[max_vertex_num",{"_index":1338,"t":{"273":{"position":[[2162,21]]}}}],["pathnum",{"_index":1353,"t":{"273":{"position":[[2483,10]]}}}],["pathnum=0",{"_index":1339,"t":{"273":{"position":[[2193,10],[3006,10]]}}}],["pathnum==0",{"_index":1385,"t":{"273":{"position":[[3096,12]]}}}],["paths[i",{"_index":1391,"t":{"273":{"position":[[3187,10]]}}}],["paths[maxsize][max_vertex_num",{"_index":1337,"t":{"273":{"position":[[2119,31]]}}}],["paths[pathnum][i",{"_index":1350,"t":{"273":{"position":[[2409,17]]}}}],["paths[pathnum][i]='\\0",{"_index":1352,"t":{"273":{"position":[[2453,23]]}}}],["pc",{"_index":211,"t":{"85":{"position":[[102,7]]}}}],["penalti",{"_index":1829,"t":{"360":{"position":[[151,48]]}}}],["perform",{"_index":1868,"t":{"367":{"position":[[80,11]]},"369":{"position":[[354,12]]}}}],["permit",{"_index":397,"t":{"110":{"position":[[918,9]]}}}],["pg",{"_index":1299,"t":{"273":{"position":[[759,4],[779,2],[795,2],[844,2],[1087,4],[1274,2],[1336,2],[1399,2],[1470,2],[1748,3],[1790,2],[1872,2]]}}}],["pi",{"_index":731,"t":{"226":{"position":[[445,2]]}}}],["pii",{"_index":729,"t":{"226":{"position":[[412,3]]}}}],["pip",{"_index":2072,"t":{"419":{"position":[[0,3]]}}}],["plasmpkg2",{"_index":2139,"t":{"439":{"position":[[336,9]]}}}],["platform",{"_index":262,"t":{"95":{"position"
:[[810,9],[841,9]]}}}],["plt",{"_index":1910,"t":{"383":{"position":[[158,3]]}}}],["plt.show",{"_index":2016,"t":{"383":{"position":[[2628,10]]}}}],["po",{"_index":1138,"t":{"266":{"position":[[487,4],[511,3],[692,3],[725,3],[794,3],[932,3]]}}}],["point",{"_index":378,"t":{"110":{"position":[[660,5],[1743,6]]}}}],["polynomi",{"_index":511,"t":{"134":{"position":[[752,10]]},"181":{"position":[[752,10]]}}}],["pop",{"_index":1186,"t":{"269":{"position":[[1650,4]]},"289":{"position":[[81,5],[122,5],[151,5]]}}}],["pop_heap",{"_index":933,"t":{"246":{"position":[[2318,8]]}}}],["postt(int",{"_index":1444,"t":{"276":{"position":[[1595,9]]},"278":{"position":[[1260,9]]},"280":{"position":[[544,9]]}}}],["postt(rt",{"_index":1458,"t":{"276":{"position":[[2056,10]]},"278":{"position":[[1642,10]]},"280":{"position":[[806,10]]}}}],["postt(t[rt].l",{"_index":1445,"t":{"276":{"position":[[1629,15]]},"278":{"position":[[1294,15]]},"280":{"position":[[585,15]]}}}],["postt(t[rt].r",{"_index":1446,"t":{"276":{"position":[[1645,15]]},"278":{"position":[[1310,15]]},"280":{"position":[[601,15]]}}}],["pos处的二进制位置为0",{"_index":1151,"t":{"266":{"position":[[863,14]]}}}],["pp_arr",{"_index":822,"t":{"238":{"position":[[63,7]]}}}],["pre",{"_index":1473,"t":{"280":{"position":[[346,3]]}}}],["pre[maxn",{"_index":1465,"t":{"280":{"position":[[244,10]]}}}],["predic",{"_index":908,"t":{"246":{"position":[[1562,27]]}}}],["predict",{"_index":1658,"t":{"332":{"position":[[1533,7],[1580,7],[1768,7]]}}}],["press",{"_index":442,"t":{"110":{"position":[[2184,7]]}}}],["pret(int",{"_index":1436,"t":{"276":{"position":[[1365,8]]},"278":{"position":[[1046,8]]}}}],["pret(rt",{"_index":1456,"t":{"276":{"position":[[2022,9]]},"278":{"position":[[1629,9]]}}}],["pret(t[rt].l",{"_index":1439,"t":{"276":{"position":[[1435,14]]},"278":{"position":[[1116,14]]}}}],["pret(t[rt].r",{"_index":1440,"t":{"276":{"position":[[1450,14]]},"278":{"position":[[1131,14]]}}}],["prev_permut",{"_index":945,"t":{"246":{"position":[[2667,16]]}}}],["prim",{"_index":500,"t":{"131":{"position":[[100,6]]}}}],["primari",{"_index":2159,"t":{"454":{"position":[[276,7],[310,7],[349,7],[389,7]]}}}],["print",{"_index":444,"t":{"110":{"position":[[2201,5]]}}}],["print(\"%c\",(asciicode=='\\r",{"_index":370,"t":{"110":{"position":[[443,28],[1658,28]]}}}],["print(\"\\n",{"_index":366,"t":{"110":{"position":[[376,9],[1588,9]]}}}],["print(\"argmax",{"_index":126,"t":{"11":{"position":[[885,13]]},"13":{"position":[[875,13]]},"397":{"position":[[871,13]]}}}],["print(\"to",{"_index":349,"t":{"110":{"position":[[99,9],[1311,9]]}}}],["print(\"welcom",{"_index":437,"t":{"110":{"position":[[2075,14]]}}}],["print(a",{"_index":106,"t":{"11":{"position":[[557,7]]},"13":{"position":[[547,7]]},"397":{"position":[[543,7]]}}}],["print(b",{"_index":109,"t":{"11":{"position":[[595,7]]},"13":{"position":[[585,7]]},"397":{"position":[[581,7]]}}}],["print(c.shap",{"_index":91,"t":{"11":{"position":[[266,14]]},"13":{"position":[[256,14]]},"397":{"position":[[252,14]]}}}],["print(d.shap",{"_index":98,"t":{"11":{"position":[[387,14]]},"13":{"position":[[377,14]]},"397":{"position":[[373,14]]}}}],["print(f'loss",{"_index":2008,"t":{"383":{"position":[[2464,12]]}}}],["print(f'{metric[2",{"_index":2012,"t":{"383":{"position":[[2545,18]]}}}],["print(i",{"_index":138,"t":{"11":{"position":[[1139,8],[1333,8]]},"13":{"position":[[1129,8],[1323,8]]},"397":{"position":[[1121,8],[1315,8]]}}}],["printf(\"%c",{"_index":1279,"t":{"273":{"position":[[312,12]]}}}],["printf(\"%d%c\",v[i],i==len",{"_index":1434,"t
":{"276":{"position":[[1298,25]]},"278":{"position":[[1000,25]]}}}],["printf(\"%d\\n",{"_index":634,"t":{"205":{"position":[[430,14]]}}}],["printf(\"%d\\n\",t[rt].d",{"_index":1486,"t":{"280":{"position":[[628,23]]}}}],["printf(\"7.28",{"_index":1384,"t":{"273":{"position":[[3017,12]]}}}],["printf(\"\\t",{"_index":1386,"t":{"273":{"position":[[3111,10]]}}}],["printf(\"\\t%d",{"_index":1388,"t":{"273":{"position":[[3162,12]]}}}],["printf(first?first=0,\"%d",{"_index":1437,"t":{"276":{"position":[[1398,27],[1528,27],[1661,27]]},"278":{"position":[[1079,27],[1201,27],[1326,27]]}}}],["priority_queue,greater的元素对序列。序列中的元素以const",{"_index":1060,"t":{"256":{"position":[[68,21]]}}}],["t[maxn",{"_index":1397,"t":{"276":{"position":[[208,9]]},"278":{"position":[[208,9]]},"280":{"position":[[290,9]]}}}],["t[rt].d=pre[rt",{"_index":1479,"t":{"280":{"position":[[437,16]]}}}],["t[rt].l=create(l1,p1",{"_index":1480,"t":{"280":{"position":[[454,20]]}}}],["t[rt].l=mid_po_build(la,p1",{"_index":1419,"t":{"276":{"position":[[824,26]]},"278":{"position":[[579,26]]}}}],["t[rt].l=mid_pr_build(la,p1",{"_index":1414,"t":{"276":{"position":[[481,26]]},"278":{"position":[[350,26]]}}}],["t[rt].r=create(p1+1,r1,l2+p2+1,r2",{"_index":1482,"t":{"280":{"position":[[490,35]]}}}],["t[rt].r=mid_po_build(p1+1,ra,lb+p2,rb",{"_index":1421,"t":{"276":{"position":[[885,37]]},"278":{"position":[[621,37]]}}}],["t[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb",{"_index":1416,"t":{"276":{"position":[[542,41]]},"278":{"position":[[392,41]]}}}],["tag{10",{"_index":1647,"t":{"332":{"position":[[1304,8]]}}}],["tag{1}",{"_index":158,"t":{"34":{"position":[[144,12]]}}}],["tag{1}3×3×3×4=108(1",{"_index":1819,"t":{"352":{"position":[[168,21]]}}}],["tag{1}acc=len(y)∑i(predi​==yi​)​(1",{"_index":2031,"t":{"394":{"position":[[196,36]]}}}],["tag{1}f(x)={0x​x<0x≥0​(1",{"_index":34,"t":{"3":{"position":[[515,26]]},"5":{"position":[[507,26]]},"19":{"position":[[515,26]]},"21":{"position":[[507,26]]}}}],["tag{1}h′=nh+n−1​(1",{"_index":342,"t":{"101":{"position":[[306,20]]}}}],["tag{1}l(y,z)=max(0,−y∗z)(1",{"_index":2046,"t":{"402":{"position":[[406,28]]}}}],["tag{1}lossmse​=∑[y−f(x)]2(1",{"_index":52,"t":{"7":{"position":[[136,29]]},"23":{"position":[[136,29]]}}}],["tag{1}min",{"_index":1841,"t":{"362":{"position":[[113,10]]}}}],["tag{1}s(yi​)=∑jn​eyjeyi​​(1",{"_index":44,"t":{"3":{"position":[[887,29]]},"5":{"position":[[879,29]]},"19":{"position":[[887,29]]},"21":{"position":[[879,29]]}}}],["tag{1}shapeoutput​=strideshapeinput​−sizekernel​+2∗padding​+1(1",{"_index":1583,"t":{"321":{"position":[[328,65]]}}}],["tag{1}yi,j​=h,w∑​wi,j,h,w​∗xh,w​(1",{"_index":1494,"t":{"285":{"position":[[142,36]]}}}],["tag{1}σ(x)=1+e−x1​(1",{"_index":7,"t":{"3":{"position":[[78,22]]},"5":{"position":[[70,22]]},"19":{"position":[[78,22]]},"21":{"position":[[70,22]]},"325":{"position":[[47,22]]}}}],["tag{2}",{"_index":163,"t":{"36":{"position":[[59,12]]}}}],["tag{2}3×3××3=27(2",{"_index":1823,"t":{"354":{"position":[[100,19]]}}}],["tag{2}dxd",{"_index":1588,"t":{"325":{"position":[[145,14]]}}}],["tag{2}dxdf(x)​={01​x<0x≥0​(2",{"_index":38,"t":{"3":{"position":[[651,30]]},"5":{"position":[[643,30]]},"19":{"position":[[651,30]]},"21":{"position":[[643,30]]}}}],["tag{2}dxdσ​=σ(1−σ)(2",{"_index":13,"t":{"3":{"position":[[169,22]]},"5":{"position":[[161,22]]},"19":{"position":[[169,22]]},"21":{"position":[[161,22]]}}}],["tag{2}l(w,b)+2λ​∥w∥12​(2",{"_index":1848,"t":{"364":{"position":[[110,26]]}}}],["tag{2}yi,j​=h,w∑​wi,j,h,w​∗xh,w​=a,b∑​vi,j,a,b​∗xi+a,j+b​(2",{"_index":1497,"t":{"2
85":{"position":[[364,61]]}}}],["tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2",{"_index":58,"t":{"7":{"position":[[244,33]]},"23":{"position":[[244,33]]}}}],["tag{3}1×1×3×4=12(3",{"_index":1827,"t":{"356":{"position":[[112,20]]}}}],["tag{3}a",{"_index":171,"t":{"38":{"position":[[210,11]]}}}],["tag{3}f(x)={0x​x<0x≥0​(3",{"_index":1591,"t":{"327":{"position":[[77,26]]}}}],["tag{3}yi,j​=a,b∑​vi,j,a,b​∗xi+a,j+b​=a,b∑​va,b​∗xi+a,j+b​(3",{"_index":1500,"t":{"285":{"position":[[636,61]]}}}],["tag{3}∂w∂​(l(w,b)+2λ​∥w∥12​)=∂w∂l(w,b)​+λw(3",{"_index":1853,"t":{"364":{"position":[[347,46]]}}}],["tag{4}dxdf(x)​={01​x<0x≥0​(4",{"_index":1593,"t":{"327":{"position":[[213,30]]}}}],["tag{4}wt+1​=(1−ηλ)wt​+η∂wt​∂l(wt​,bt​)​(4",{"_index":1859,"t":{"364":{"position":[[501,43]]}}}],["tag{4}yi,j​=a,b∑​va,b​∗xi+a,j+b​=a=−δ∑δ​b=−δ∑δ​va,b​∗xia​,j+b​(4",{"_index":1506,"t":{"287":{"position":[[194,66]]}}}],["tag{5}lossmse​=∑[y−f(x)]2(5",{"_index":1595,"t":{"330":{"position":[[112,29]]}}}],["tag{5}shapeoutput​=strideshapeinput​−sizekernel​+2∗padding​+1(5",{"_index":1510,"t":{"287":{"position":[[398,65]]}}}],["tag{6}∥y−f(x)∥2​=2∑[y−f(x)]2​(6",{"_index":1597,"t":{"330":{"position":[[220,33]]}}}],["tag{7",{"_index":1614,"t":{"332":{"position":[[403,7]]}}}],["taint",{"_index":1772,"t":{"344":{"position":[[356,7]]}}}],["target",{"_index":249,"t":{"95":{"position":[[635,6],[828,6],[938,6],[984,6]]}}}],["target_cc",{"_index":255,"t":{"95":{"position":[[692,9]]}}}],["target_cc_prefix)gcc",{"_index":256,"t":{"95":{"position":[[705,22]]}}}],["target_cc_prefix)ld",{"_index":279,"t":{"95":{"position":[[1063,21]]}}}],["target_ld",{"_index":278,"t":{"95":{"position":[[1050,9]]}}}],["tcp/ip",{"_index":2170,"t":{"460":{"position":[[34,13],[48,18]]}}}],["tcp和udp",{"_index":2172,"t":{"460":{"position":[[148,12]]}}}],["techniqu",{"_index":1864,"t":{"367":{"position":[[29,9]]}}}],["temp",{"_index":473,"t":{"124":{"position":[[158,4],[210,4],[253,13],[270,5],[377,4],[516,5]]},"126":{"position":[[244,4],[296,4],[339,13],[356,5],[463,4],[602,5]]}}}],["tensor(0.2684",{"_index":1664,"t":{"332":{"position":[[1695,14],[1881,14]]}}}],["tensor(0.2684)10​11",{"_index":1725,"t":{"340":{"position":[[1073,20]]}}}],["tensor(0.2684)python",{"_index":1731,"t":{"340":{"position":[[1268,20]]}}}],["tensor(1",{"_index":139,"t":{"11":{"position":[[1152,11]]},"13":{"position":[[1142,11]]},"397":{"position":[[1134,11]]}}}],["tensor(2",{"_index":141,"t":{"11":{"position":[[1175,11]]},"13":{"position":[[1165,11]]},"397":{"position":[[1157,11]]}}}],["tensor(3",{"_index":143,"t":{"11":{"position":[[1198,11]]},"13":{"position":[[1188,11]]},"397":{"position":[[1180,11]]}}}],["tensor(4",{"_index":140,"t":{"11":{"position":[[1164,10]]},"13":{"position":[[1154,10]]},"397":{"position":[[1146,10]]}}}],["tensor(5",{"_index":142,"t":{"11":{"position":[[1187,10]]},"13":{"position":[[1177,10]]},"397":{"position":[[1169,10]]}}}],["tensor(6",{"_index":144,"t":{"11":{"position":[[1210,10]]},"13":{"position":[[1200,10]]},"397":{"position":[[1192,10]]}}}],["tensor([1",{"_index":130,"t":{"11":{"position":[[961,10],[979,10],[1346,11]]},"13":{"position":[[951,10],[969,10],[1336,11]]},"397":{"position":[[947,10],[965,10],[1328,11]]}}}],["tensor([3",{"_index":148,"t":{"11":{"position":[[1385,11]]},"13":{"position":[[1375,11]]},"397":{"position":[[1367,11]]}}}],["tensor([4",{"_index":147,"t":{"11":{"position":[[1366,10]]},"13":{"position":[[1356,10]]},"397":{"position":[[1348,10]]}}}],["tensor([6",{"_index":149,"t":{"11":{"position":[[1405,10]]},"13":{"position":[[1395,10]]},"397":{"position":[[1
387,10]]}}}],["tensor([[9",{"_index":107,"t":{"11":{"position":[[572,11]]},"13":{"position":[[562,11]]},"397":{"position":[[558,11]]}}}],["tensorflow框架,可以使用tensorboard",{"_index":2070,"t":{"417":{"position":[[3,36]]}}}],["tensor的*乘法是对tensor",{"_index":103,"t":{"11":{"position":[[468,28]]},"13":{"position":[[458,28]]},"397":{"position":[[454,28]]}}}],["terminal_proxy.sh",{"_index":2092,"t":{"430":{"position":[[0,23]]}}}],["test",{"_index":766,"t":{"231":{"position":[[29,4],[101,5],[232,4]]},"367":{"position":[[224,4],[277,7]]},"369":{"position":[[82,4],[240,4]]},"371":{"position":[[95,4],[115,5]]},"383":{"position":[[1771,5],[2519,4]]}}}],["test.first",{"_index":769,"t":{"231":{"position":[[107,10]]}}}],["test.four",{"_index":775,"t":{"231":{"position":[[176,9]]}}}],["test.second",{"_index":770,"t":{"231":{"position":[[125,11]]}}}],["test.third",{"_index":772,"t":{"231":{"position":[[149,10]]}}}],["test_acc",{"_index":2007,"t":{"383":{"position":[[2367,8],[2453,10]]}}}],["test_acc:.3f",{"_index":2011,"t":{"383":{"position":[[2528,16]]}}}],["test_it",{"_index":1965,"t":{"383":{"position":[[1391,10],[2405,10],[2668,9],[2787,10]]}}}],["text",{"_index":36,"t":{"3":{"position":[[568,7]]},"5":{"position":[[560,7]]},"19":{"position":[[568,7]]},"21":{"position":[[560,7]]},"327":{"position":[[130,7]]},"439":{"position":[[80,4]]}}}],["text{subject",{"_index":1836,"t":{"362":{"position":[[58,13]]}}}],["theta",{"_index":1840,"t":{"362":{"position":[[106,6],[192,23]]}}}],["third",{"_index":763,"t":{"229":{"position":[[52,6]]}}}],["third:\"method",{"_index":778,"t":{"231":{"position":[[258,13]]}}}],["thread",{"_index":374,"t":{"110":{"position":[[556,8],[565,6],[2169,6],[2238,8],[2247,6],[2324,6]]}}}],["three",{"_index":779,"t":{"231":{"position":[[272,7]]}}}],["time",{"_index":541,"t":{"144":{"position":[[143,6]]},"149":{"position":[[166,6]]},"151":{"position":[[68,6]]},"161":{"position":[[134,6]]},"163":{"position":[[70,6],[89,6],[115,6]]},"319":{"position":[[67,6],[98,6]]},"321":{"position":[[13,6],[22,6],[55,6],[66,6],[77,6],[115,6],[148,6],[158,6],[424,6],[435,6],[444,6],[453,6],[463,6]]},"350":{"position":[[93,6]]},"352":{"position":[[135,6],[144,6],[153,6]]},"354":{"position":[[70,6],[79,6],[86,6]]},"356":{"position":[[7,6],[80,6],[89,6],[98,6]]},"367":{"position":[[243,6],[290,5]]},"369":{"position":[[187,6]]}}}],["time.h",{"_index":685,"t":{"220":{"position":[[227,6]]}}}],["timer",{"_index":1984,"t":{"383":{"position":[[1784,6]]}}}],["timer.start",{"_index":1993,"t":{"383":{"position":[[1950,13]]}}}],["timer.stop",{"_index":2002,"t":{"383":{"position":[[2142,12]]}}}],["timer.sum():.1f",{"_index":2013,"t":{"383":{"position":[[2579,16]]}}}],["titile(if",{"_index":2121,"t":{"439":{"position":[[66,9]]}}}],["titl",{"_index":2115,"t":{"437":{"position":[[321,7]]},"439":{"position":[[100,5]]}}}],["title/window",{"_index":2120,"t":{"439":{"position":[[53,12]]}}}],["title文字不能垂直居中,可以更换为window",{"_index":2114,"t":{"437":{"position":[[295,25]]}}}],["tmp",{"_index":1319,"t":{"273":{"position":[[1435,5],[1545,5],[2882,5]]}}}],["tmp[0",{"_index":1322,"t":{"273":{"position":[[1555,7],[2915,8],[3051,7]]}}}],["tmp[2",{"_index":1324,"t":{"273":{"position":[[1567,7]]}}}],["tmp[20",{"_index":1377,"t":{"273":{"position":[[2832,8]]}}}],["tmp[3",{"_index":1380,"t":{"273":{"position":[[2944,8],[3059,8]]}}}],["tmp[max_vertex_num",{"_index":1312,"t":{"273":{"position":[[1156,20]]}}}],["toc",{"_index":659,"t":{"215":{"position":[[0,5]]}}}],["todo(\"start",{"_index":440,"t":{"110":{"position":[[2148,11]]}}}],["top",{"_index"
:1516,"t":{"289":{"position":[[115,6],[144,6]]}}}],["topologicalsort",{"_index":1239,"t":{"271":{"position":[[1758,30]]}}}],["torch",{"_index":20,"t":{"3":{"position":[[322,5],[726,5]]},"5":{"position":[[314,5],[718,5]]},"19":{"position":[[322,5],[726,5]]},"21":{"position":[[314,5],[718,5]]},"325":{"position":[[299,5]]},"327":{"position":[[288,5]]},"383":{"position":[[7,5],[37,5],[112,5]]}}}],["torch.concat((a",{"_index":95,"t":{"11":{"position":[[309,16]]},"13":{"position":[[299,16]]},"397":{"position":[[295,16]]}}}],["torch.linspac",{"_index":21,"t":{"3":{"position":[[332,15],[736,15]]},"5":{"position":[[324,15],[728,15]]},"19":{"position":[[332,15],[736,15]]},"21":{"position":[[324,15],[728,15]]},"325":{"position":[[309,15]]},"327":{"position":[[298,15]]}}}],["torch.log",{"_index":1653,"t":{"332":{"position":[[1444,9]]}}}],["torch.log(torch.softmax(predict",{"_index":1660,"t":{"332":{"position":[[1590,32]]},"340":{"position":[[968,32]]}}}],["torch.nn",{"_index":16,"t":{"3":{"position":[[283,8],[687,8]]},"5":{"position":[[275,8],[679,8]]},"19":{"position":[[283,8],[687,8]]},"21":{"position":[[275,8],[679,8]]},"325":{"position":[[260,8]]},"327":{"position":[[249,8]]},"332":{"position":[[1481,8]]},"340":{"position":[[857,8]]},"383":{"position":[[64,8]]}}}],["torch.nn.crossentropyloss",{"_index":1977,"t":{"383":{"position":[[1630,27]]}}}],["torch.nn.crossentropyloss相当于torch.softmax",{"_index":1652,"t":{"332":{"position":[[1400,41]]}}}],["torch.nn.modul",{"_index":1947,"t":{"383":{"position":[[971,17]]}}}],["torch.nn.nllloss",{"_index":1654,"t":{"332":{"position":[[1456,17]]}}}],["torch.nn.sequenti",{"_index":1925,"t":{"383":{"position":[[505,20]]}}}],["torch.optim.sgd(net.paramet",{"_index":1975,"t":{"383":{"position":[[1582,33]]}}}],["torch.rand(4",{"_index":82,"t":{"11":{"position":[[118,13],[157,13]]},"13":{"position":[[108,13],[147,13]]},"397":{"position":[[104,13],[143,13]]}}}],["torch.size([2",{"_index":92,"t":{"11":{"position":[[283,14]]},"13":{"position":[[273,14]]},"397":{"position":[[269,14]]}}}],["torch.size([4",{"_index":99,"t":{"11":{"position":[[404,14]]},"13":{"position":[[394,14]]},"397":{"position":[[390,14]]}}}],["torch.stack((a",{"_index":88,"t":{"11":{"position":[[196,15]]},"13":{"position":[[186,15]]},"397":{"position":[[182,15]]}}}],["torch.sum(y_hat.argmax(dim=1",{"_index":1962,"t":{"383":{"position":[[1328,29]]}}}],["torch.tensor([1",{"_index":134,"t":{"11":{"position":[[1061,16]]},"13":{"position":[[1051,16]]},"332":{"position":[[1640,16],[1823,16]]},"340":{"position":[[1018,16],[1208,16]]},"397":{"position":[[1043,16]]}}}],["torch.tensor([4",{"_index":135,"t":{"11":{"position":[[1089,16]]},"13":{"position":[[1079,16]]},"397":{"position":[[1071,16]]}}}],["torch.tensor([[0.1",{"_index":117,"t":{"11":{"position":[[791,19]]},"13":{"position":[[781,19]]},"397":{"position":[[777,19]]}}}],["torch.tensor([[1",{"_index":145,"t":{"11":{"position":[[1229,17]]},"13":{"position":[[1219,17]]},"397":{"position":[[1211,17]]}}}],["torch.tensor([[2",{"_index":1659,"t":{"332":{"position":[[1543,17],[1778,17]]},"340":{"position":[[921,17],[1162,17]]}}}],["torch.tensor([[3",{"_index":104,"t":{"11":{"position":[[501,17]]},"13":{"position":[[491,17]]},"397":{"position":[[487,17]]}}}],["torch.tensor([[4",{"_index":146,"t":{"11":{"position":[[1270,17]]},"13":{"position":[[1260,17]]},"397":{"position":[[1252,17]]}}}],["train",{"_index":1876,"t":{"367":{"position":[[214,5]]},"369":{"position":[[144,8]]},"371":{"position":[[0,5],[14,5]]},"383":{"position":[[1758,6],[2492,5]]}}}],["train(lenet"
,{"_index":2021,"t":{"383":{"position":[[2762,12]]}}}],["train(net",{"_index":1963,"t":{"383":{"position":[[1368,10]]}}}],["train_acc",{"_index":2005,"t":{"383":{"position":[[2187,9],[2349,10]]}}}],["train_acc:.3f",{"_index":2010,"t":{"383":{"position":[[2502,16]]}}}],["train_it",{"_index":1964,"t":{"383":{"position":[[1379,11],[2656,11],[2775,11]]}}}],["train_l",{"_index":2003,"t":{"383":{"position":[[2155,7],[2339,9]]}}}],["train_l:.3f",{"_index":2009,"t":{"383":{"position":[[2477,14]]}}}],["transform",{"_index":904,"t":{"246":{"position":[[1413,9]]}}}],["treasur",{"_index":459,"t":{"118":{"position":[[83,8]]}}}],["treat",{"_index":241,"t":{"95":{"position":[[336,7]]}}}],["trove",{"_index":460,"t":{"118":{"position":[[92,5]]}}}],["true",{"_index":805,"t":{"231":{"position":[[834,5]]},"258":{"position":[[231,11]]},"262":{"position":[[198,16]]},"269":{"position":[[698,5],[1559,5]]},"271":{"position":[[1047,5],[1108,6]]},"273":{"position":[[71,4]]}}}],["true;//加入拓扑排序的顶点为n",{"_index":1255,"t":{"271":{"position":[[2214,30]]}}}],["truth做bc",{"_index":532,"t":{"142":{"position":[[64,9]]}}}],["truth的bc",{"_index":535,"t":{"142":{"position":[[119,9]]}}}],["two",{"_index":774,"t":{"231":{"position":[[170,5]]}}}],["type(m",{"_index":1968,"t":{"383":{"position":[[1451,7],[1475,7]]}}}],["typedef",{"_index":781,"t":{"231":{"position":[[323,7],[540,7]]},"233":{"position":[[0,7]]},"269":{"position":[[126,7],[274,7]]},"271":{"position":[[198,7],[415,7]]},"273":{"position":[[165,7],[185,7],[357,7],[390,7],[489,7],[585,7]]},"276":{"position":[[100,7]]},"278":{"position":[[100,7]]},"280":{"position":[[186,7]]}}}],["u",{"_index":517,"t":{"140":{"position":[[0,4],[124,1]]},"217":{"position":[[198,1],[218,1],[239,1]]},"273":{"position":[[1992,2],[2264,3],[2310,6],[2512,9]]},"439":{"position":[[347,1]]}}}],["u,int",{"_index":1341,"t":{"273":{"position":[[2247,5]]}}}],["u==v",{"_index":1347,"t":{"273":{"position":[[2361,6]]}}}],["u=q.front();//取队首顶点u",{"_index":1245,"t":{"271":{"position":[[1926,20]]}}}],["udg",{"_index":1283,"t":{"273":{"position":[[374,4]]}}}],["ull",{"_index":727,"t":{"226":{"position":[[381,3],[588,3]]}}}],["uncertainti",{"_index":1606,"t":{"332":{"position":[[139,12]]}}}],["unchang",{"_index":2032,"t":{"394":{"position":[[322,9]]}}}],["uncom",{"_index":323,"t":{"95":{"position":[[1907,9]]},"112":{"position":[[851,9]]}}}],["undefin",{"_index":282,"t":{"95":{"position":[[1101,9]]}}}],["uniqu",{"_index":905,"t":{"246":{"position":[[1448,6],[1520,9]]}}}],["unique_copi",{"_index":906,"t":{"246":{"position":[[1491,11]]}}}],["unit",{"_index":28,"t":{"3":{"position":[[426,5]]},"5":{"position":[[418,5]]},"19":{"position":[[426,5]]},"21":{"position":[[418,5]]}}}],["unordered_map",{"_index":1520,"t":{"289":{"position":[[236,14]]},"307":{"position":[[136,14]]}}}],["unordered_multimap",{"_index":1522,"t":{"289":{"position":[[271,22]]}}}],["unordered_multimap的操作和set或者map等的操作基本一致,唯一的区别就是不支持类似lower_bound",{"_index":1560,"t":{"307":{"position":[[171,69]]}}}],["unordered_multiset",{"_index":1521,"t":{"289":{"position":[[251,19]]},"307":{"position":[[151,19]]}}}],["unordered_set",{"_index":1519,"t":{"289":{"position":[[221,14]]},"307":{"position":[[121,14]]}}}],["unset",{"_index":2099,"t":{"430":{"position":[[222,5],[238,5],[255,5]]}}}],["unsign",{"_index":728,"t":{"226":{"position":[[385,8]]},"266":{"position":[[1001,8]]}}}],["until",{"_index":2052,"t":{"402":{"position":[[646,5]]}}}],["updat",{"_index":1891,"t":{"371":{"position":[[47,6]]}}}],["update='append",{"_index":2083,"t":{"424":{"position":[[261,16],
[336,16]]},"426":{"position":[[227,16]]}}}],["upper_bound",{"_index":923,"t":{"246":{"position":[[2014,11]]}}}],["us",{"_index":259,"t":{"95":{"position":[[760,4],[1206,4],[2025,6]]},"110":{"position":[[931,4]]},"112":{"position":[[969,6]]},"126":{"position":[[65,5]]},"195":{"position":[[25,5]]},"205":{"position":[[25,5]]},"211":{"position":[[25,5]]},"222":{"position":[[0,5]]},"224":{"position":[[250,5]]},"248":{"position":[[1481,5]]},"269":{"position":[[53,5]]},"271":{"position":[[119,5]]},"276":{"position":[[79,5]]},"278":{"position":[[79,5]]},"280":{"position":[[165,5]]},"367":{"position":[[39,4],[250,5]]},"369":{"position":[[70,4],[132,4],[215,4]]},"371":{"position":[[126,4]]}}}],["user",{"_index":858,"t":{"244":{"position":[[839,5]]}}}],["util",{"_index":1051,"t":{"254":{"position":[[0,11]]}}}],["v",{"_index":1335,"t":{"273":{"position":[[1995,8],[2612,2]]},"276":{"position":[[1118,2]]},"278":{"position":[[820,2]]}}}],["v,int",{"_index":1342,"t":{"273":{"position":[[2253,5]]}}}],["v.push_back(w",{"_index":1427,"t":{"276":{"position":[[1179,15]]},"278":{"position":[[881,15]]}}}],["v1",{"_index":545,"t":{"149":{"position":[[21,12]]}}}],["v=g.vertexs[u].connectors[i];//u的后继节点v",{"_index":1247,"t":{"271":{"position":[[2011,38]]}}}],["v_n",{"_index":157,"t":{"34":{"position":[[137,6]]},"38":{"position":[[171,4],[203,6]]}}}],["v_t",{"_index":155,"t":{"34":{"position":[[127,4]]},"36":{"position":[[53,5]]},"38":{"position":[[193,4]]}}}],["val",{"_index":960,"t":{"248":{"position":[[677,4],[722,6],[1038,4]]},"250":{"position":[[497,4],[698,4],[743,6],[982,4]]}}}],["valid",{"_index":1863,"t":{"367":{"position":[[13,10],[139,10]]},"369":{"position":[[34,11]]},"371":{"position":[[54,10]]}}}],["valu",{"_index":1065,"t":{"256":{"position":[[361,6],[510,8]]}}}],["var",{"_index":2156,"t":{"454":{"position":[[260,4],[294,4],[333,4],[373,4]]}}}],["vc++6.0中指针初始化为0xcccccccc",{"_index":1306,"t":{"273":{"position":[[873,26]]}}}],["vc++6.0中指针初始化为0xcccccccc,如果不将指针初始化为null",{"_index":1315,"t":{"273":{"position":[[1221,45]]}}}],["vec",{"_index":486,"t":{"126":{"position":[[819,4]]}}}],["vec.empti",{"_index":491,"t":{"126":{"position":[[970,13]]}}}],["vec.push_back(remaind",{"_index":489,"t":{"126":{"position":[[903,25]]}}}],["vec.rbegin",{"_index":494,"t":{"126":{"position":[[1023,13]]}}}],["vec.rend",{"_index":495,"t":{"126":{"position":[[1043,11]]}}}],["vector",{"_index":688,"t":{"222":{"position":[[68,6]]},"248":{"position":[[0,10],[149,81],[231,27],[433,12],[1301,33],[1338,55],[1472,8]]},"269":{"position":[[29,8]]},"271":{"position":[[50,8]]},"289":{"position":[[0,17]]},"291":{"position":[[94,19],[224,12]]}}}],["vector::iter",{"_index":1221,"t":{"271":{"position":[[1177,24]]}}}],["vector为底层容器,堆heap",{"_index":845,"t":{"244":{"position":[[448,41]]}}}],["vector对象,存储的是int",{"_index":947,"t":{"248":{"position":[[277,27]]}}}],["vector对象,并从由迭代器first和last定义的序列[first",{"_index":952,"t":{"248":{"position":[[380,41]]}}}],["vector的s",{"_index":1525,"t":{"291":{"position":[[61,18]]}}}],["ver",{"_index":1295,"t":{"273":{"position":[[644,5]]}}}],["veri",{"_index":329,"t":{"95":{"position":[[2018,4]]},"112":{"position":[[962,4]]}}}],["vernum",{"_index":1301,"t":{"273":{"position":[[798,7],[1339,7],[1456,8]]}}}],["vernum,arcnum",{"_index":1293,"t":{"273":{"position":[[621,14]]}}}],["vers[a].firstarc",{"_index":1332,"t":{"273":{"position":[[1752,18],[1793,18],[1875,17]]}}}],["vers[i].data=tmp[i",{"_index":1321,"t":{"273":{"position":[[1473,21]]}}}],["vers[i].firstarc",{"_index":1304,"t":{"273":{"position":[[847,17]]}}}],["v
ersion/src/project0/build目录下执行mak",{"_index":235,"t":{"95":{"position":[[43,35]]}}}],["version/src/projecti/build",{"_index":217,"t":{"85":{"position":[[287,33]]},"89":{"position":[[114,29]]}}}],["version/src/projecti/build文件夹下进行,即要在终端中通过cd",{"_index":449,"t":{"112":{"position":[[17,49]]}}}],["version/src/projecti/build目录下创建.bochsrc",{"_index":293,"t":{"95":{"position":[[1379,41]]}}}],["version/src/projecti/build目录下的makefi",{"_index":248,"t":{"95":{"position":[[593,39],[1158,39]]}}}],["version/src/projecti/build目录下的makefie文件(由于每个project下都存在一个对应的makefil",{"_index":242,"t":{"95":{"position":[[369,71]]}}}],["version/src/projecti/src/geeko",{"_index":216,"t":{"85":{"position":[[232,37]]}}}],["version/src/projecti/src/geekos/main.c",{"_index":346,"t":{"110":{"position":[[9,40]]}}}],["version/src/目录下会存在project0",{"_index":213,"t":{"85":{"position":[[131,26]]}}}],["vert",{"_index":1837,"t":{"362":{"position":[[83,5]]},"364":{"position":[[92,5],[273,5]]}}}],["vert^2_1",{"_index":1838,"t":{"362":{"position":[[91,9]]},"364":{"position":[[100,9],[281,9]]}}}],["vert_2",{"_index":56,"t":{"7":{"position":[[205,7]]},"23":{"position":[[205,7]]},"330":{"position":[[181,7]]}}}],["vertex",{"_index":782,"t":{"231":{"position":[[338,6],[414,8],[516,7],[578,8]]},"269":{"position":[[141,6],[198,8],[250,7],[312,8]]},"271":{"position":[[213,6],[289,8],[391,7],[453,8]]}}}],["vertex(int",{"_index":787,"t":{"231":{"position":[[460,10]]},"269":{"position":[[219,10]]},"271":{"position":[[335,10]]}}}],["vertexs.resize(1",{"_index":801,"t":{"231":{"position":[[752,18]]},"271":{"position":[[627,18]]}}}],["vertexs.resize(n",{"_index":798,"t":{"231":{"position":[[697,18]]},"269":{"position":[[431,18]]},"271":{"position":[[572,18]]}}}],["vertexs.s",{"_index":1160,"t":{"269":{"position":[[516,16]]},"271":{"position":[[712,16]]}}}],["vertexs[id1].connectors.push_back(id2",{"_index":1161,"t":{"269":{"position":[[560,39],[609,39]]},"271":{"position":[[756,39],[856,39]]}}}],["vertexs[id1].indegre",{"_index":1213,"t":{"271":{"position":[[962,24]]}}}],["vertexs[id1].outdegre",{"_index":1211,"t":{"271":{"position":[[796,25],[936,25]]}}}],["vertexs[id2].connectors.push_back(id1",{"_index":1162,"t":{"269":{"position":[[649,39]]},"271":{"position":[[896,39]]}}}],["vertexs[id2].indegre",{"_index":1212,"t":{"271":{"position":[[822,24],[1013,24]]}}}],["vertexs[id2].outdegre",{"_index":1214,"t":{"271":{"position":[[987,25]]}}}],["vertexs[id].connectors.s",{"_index":1174,"t":{"269":{"position":[[920,30],[1353,30]]}}}],["vertexs[id].connectors[i",{"_index":1175,"t":{"269":{"position":[[968,26],[1401,26]]}}}],["vertextyp",{"_index":1275,"t":{"273":{"position":[[218,10],[511,10],[941,10],[2108,10]]}}}],["vertic",{"_index":2124,"t":{"439":{"position":[[124,11]]}}}],["vga_update_interv",{"_index":317,"t":{"95":{"position":[[1805,20]]},"112":{"position":[[749,20]]}}}],["vgaromimag",{"_index":300,"t":{"95":{"position":[[1509,12]]},"112":{"position":[[453,12]]}}}],["vi",{"_index":2076,"t":{"424":{"position":[[26,3]]},"426":{"position":[[26,3]]}}}],["vis.line([0",{"_index":2078,"t":{"424":{"position":[[116,14]]}}}],["vis.line([[0",{"_index":2086,"t":{"426":{"position":[[41,14]]}}}],["vis.line([loss.item",{"_index":2084,"t":{"424":{"position":[[289,23]]}}}],["visdom",{"_index":2074,"t":{"419":{"position":[[12,6]]},"422":{"position":[[0,28],[37,6]]},"424":{"position":[[5,6],[19,6],[32,8]]},"426":{"position":[[5,6],[19,6],[32,8]]}}}],["visdom.serv",{"_index":2075,"t":{"422":{"position":[[54,13]]}}}],["visit",{"_index":1165,"t":{"269":{"position":[[753,8
],[1132,8]]}}}],["visit(vertextyp",{"_index":1278,"t":{"273":{"position":[[290,16]]}}}],["visit[cnt]=0",{"_index":1383,"t":{"273":{"position":[[2992,13]]}}}],["visit[max_vertex_num",{"_index":1336,"t":{"273":{"position":[[2077,22]]}}}],["visit[p",{"_index":1356,"t":{"273":{"position":[[2568,8]]}}}],["visit[u]=0",{"_index":1360,"t":{"273":{"position":[[2679,11]]}}}],["visit[u]=1",{"_index":1344,"t":{"273":{"position":[[2298,11]]}}}],["visited.count(id1",{"_index":1176,"t":{"269":{"position":[[998,19],[1431,19]]}}}],["visited.insert(id1",{"_index":1178,"t":{"269":{"position":[[1044,20],[1497,20]]}}}],["visited.insert(start",{"_index":1169,"t":{"269":{"position":[[802,22],[1216,22]]}}}],["viz.line([[y1",{"_index":2090,"t":{"426":{"position":[[177,14]]}}}],["viz.line([real_y_data",{"_index":2081,"t":{"424":{"position":[[208,23]]}}}],["vnode",{"_index":1289,"t":{"273":{"position":[[504,6],[547,7]]}}}],["vnv_nvn",{"_index":193,"t":{"70":{"position":[[68,21]]}}}],["voc中,类别种类为20类,因此在预测阶段输出的[7",{"_index":563,"t":{"149":{"position":[[422,27]]}}}],["void",{"_index":348,"t":{"110":{"position":[[82,4],[1294,4],[1833,4]]},"271":{"position":[[1073,4]]},"273":{"position":[[285,4],[2215,4]]},"276":{"position":[[1073,4],[1360,4],[1475,4],[1590,4]]},"278":{"position":[[775,4],[1041,4],[1148,4],[1255,4]]},"280":{"position":[[539,4]]}}}],["vrtype",{"_index":1276,"t":{"273":{"position":[[248,6],[433,6]]}}}],["vtv_tvt",{"_index":192,"t":{"70":{"position":[[47,20]]}}}],["vt​∪vn​)∗(1",{"_index":159,"t":{"34":{"position":[[157,15]]}}}],["vt​∪vn​)∗(3",{"_index":172,"t":{"38":{"position":[[229,15]]}}}],["vt∗(2)",{"_index":160,"t":{"36":{"position":[[6,9]]}}}],["vt∗​(2",{"_index":164,"t":{"36":{"position":[[72,9]]}}}],["vt∪vn)∗(1)",{"_index":152,"t":{"34":{"position":[[73,14]]}}}],["vt∪vn)∗(3)\\alpha",{"_index":169,"t":{"38":{"position":[[115,19]]}}}],["v当前是第k",{"_index":1343,"t":{"273":{"position":[[2268,10]]}}}],["v的入度减1",{"_index":1249,"t":{"271":{"position":[[2067,9]]}}}],["w",{"_index":558,"t":{"149":{"position":[[307,2]]},"321":{"position":[[451,1]]},"338":{"position":[[83,17]]},"340":{"position":[[177,4],[410,1],[778,4],[1359,3],[1395,2],[1401,3],[1604,3],[1644,2],[1650,3],[1743,1],[1787,1],[2032,3],[2072,2],[2078,3],[2116,2]]},"342":{"position":[[129,4],[221,2],[245,2],[356,5]]},"344":{"position":[[115,23],[152,33]]},"346":{"position":[[66,1],[98,1]]},"362":{"position":[[89,1]]},"364":{"position":[[98,1],[237,2],[279,1],[332,2],[345,1]]},"402":{"position":[[564,1],[608,1],[612,1]]}}}],["w')o(co​×ci​×h×w×h′×w",{"_index":1585,"t":{"321":{"position":[[470,23]]}}}],["w'co​×h′×w",{"_index":1580,"t":{"321":{"position":[[165,11]]}}}],["w(l(w,b)+λ2∥w∥12)=∂l(w,b)∂w+λw(3)\\frac{\\partial}{\\parti",{"_index":1849,"t":{"364":{"position":[[177,59]]}}}],["w<0",{"_index":1703,"t":{"340":{"position":[[516,3],[1824,3]]},"342":{"position":[[262,3]]}}}],["w=q.front",{"_index":1426,"t":{"276":{"position":[[1157,12]]},"278":{"position":[[859,12]]}}}],["w>0",{"_index":1702,"t":{"340":{"position":[[504,3],[1780,3]]},"342":{"position":[[238,3]]}}}],["w][b,1,h,w]的tensor",{"_index":589,"t":{"157":{"position":[[196,46]]}}}],["w][b,1,h,w]的tensor,再将二者concat后通过7×77",{"_index":593,"t":{"161":{"position":[[97,36]]}}}],["w][b,c,h,w]分别经过最大池化和平均池化来压缩空间维度、学习通道之间的特征,得到[b,c,1,1][b",{"_index":590,"t":{"159":{"position":[[24,56]]}}}],["w][b,c,h,w]分别经过最大池化和平均池化(通过torch.max和torch.mean函数实现)得到[b,1,h,w][b",{"_index":592,"t":{"161":{"position":[[24,66]]}}}],["w][b,c,h,w]的特征图通过池化挤压宽高维度,得到[b,c,1,1][b",{"_index":604,"t":{"167":{"position":[[56,40]]}}}],["w][b
,c,h,w]经过空间注意力机制算法得到[b,1,h,w][b",{"_index":588,"t":{"157":{"position":[[153,36]]}}}],["w][b,c,h,w]经过通道注意力机制算法得到[b,c,1,1][b",{"_index":586,"t":{"157":{"position":[[30,36]]}}}],["w^{2}l2​=(wx+b−y)2+λw2",{"_index":1708,"t":{"340":{"position":[[640,22]]}}}],["w_t",{"_index":1858,"t":{"364":{"position":[[496,4]]}}}],["w_{\\text",{"_index":1734,"t":{"340":{"position":[[1342,8],[1587,8],[2015,8]]}}}],["wall",{"_index":289,"t":{"95":{"position":[[1266,4],[1315,4]]}}}],["warn",{"_index":239,"t":{"95":{"position":[[321,8]]}}}],["wci​×h×w",{"_index":1573,"t":{"321":{"position":[[29,8]]}}}],["weight",{"_index":1287,"t":{"273":{"position":[[440,7]]},"338":{"position":[[101,8]]},"394":{"position":[[336,7]]},"454":{"position":[[420,7]]}}}],["welcom",{"_index":2150,"t":{"454":{"position":[[101,8]]}}}],["werror",{"_index":246,"t":{"95":{"position":[[507,6]]}}}],["while(!q.empti",{"_index":1244,"t":{"271":{"position":[[1903,18]]},"276":{"position":[[1133,17]]},"278":{"position":[[835,17]]}}}],["while(1",{"_index":355,"t":{"110":{"position":[[150,8],[1362,8]]}}}],["while(cin",{"_index":977,"t":{"248":{"position":[[1537,9]]}}}],["while(g.s",{"_index":1170,"t":{"269":{"position":[[825,14],[1273,14]]}}}],["while(in[p1]!=pre[rt",{"_index":1477,"t":{"280":{"position":[[398,22]]}}}],["while(it!=g.vertexs.end",{"_index":1223,"t":{"271":{"position":[[1226,27]]}}}],["while(m",{"_index":1218,"t":{"271":{"position":[[1128,7]]}}}],["while(mid[p1]!=rt",{"_index":1410,"t":{"276":{"position":[[431,18],[774,18]]},"278":{"position":[[315,18],[544,18]]}}}],["while(~scanf(\"%d\",&n",{"_index":1447,"t":{"276":{"position":[[1720,22]]},"278":{"position":[[1385,22]]}}}],["white",{"_index":464,"t":{"118":{"position":[[151,5]]}}}],["win='win_id",{"_index":2079,"t":{"424":{"position":[[137,13],[247,13],[322,13]]},"426":{"position":[[68,13],[213,13]]}}}],["window",{"_index":2141,"t":{"442":{"position":[[0,20]]}}}],["wise",{"_index":539,"t":{"144":{"position":[[73,7]]}}}],["wise)以及逐点(point",{"_index":538,"t":{"144":{"position":[[57,15]]}}}],["wnew",{"_index":1710,"t":{"340":{"position":[[736,4],[1296,4],[1488,4],[1964,4]]},"342":{"position":[[106,4],[150,4],[327,4]]}}}],["work",{"_index":254,"t":{"95":{"position":[[681,5]]}}}],["work.109",{"_index":277,"t":{"95":{"position":[[1040,9]]}}}],["write",{"_index":324,"t":{"95":{"position":[[1925,5]]},"112":{"position":[[869,5]]}}}],["wt+1=(1−ηλ)wt+η∂l(wt,bt)∂wt(4)w_{t+1}=(1",{"_index":1854,"t":{"364":{"position":[[394,40]]}}}],["wwnew",{"_index":1767,"t":{"342":{"position":[[375,5]]}}}],["www和偏置项bbb",{"_index":2048,"t":{"402":{"position":[[458,39]]}}}],["w}=\\left\\{\\begin{array}{l",{"_index":1701,"t":{"340":{"position":[[472,27]]}}}],["w}\\end{aligned}wnew",{"_index":1715,"t":{"340":{"position":[[815,19]]}}}],["w}\\right",{"_index":1745,"t":{"340":{"position":[[1705,9]]}}}],["w−h",{"_index":1760,"t":{"342":{"position":[[140,5]]}}}],["w−h)−2λw",{"_index":1768,"t":{"342":{"position":[[381,11]]}}}],["w−h)−2λww_{\\text",{"_index":1766,"t":{"342":{"position":[[332,18]]}}}],["w−h)−λ,(w−h)+λ,​w>0w<0",{"_index":1765,"t":{"342":{"position":[[289,27]]}}}],["w−h)−λ,w>0(w−h)+λ,w<0w_{\\text",{"_index":1761,"t":{"342":{"position":[[155,32]]}}}],["w−hw_{\\text",{"_index":1758,"t":{"342":{"position":[[111,12]]}}}],["w−η∂l1∂w=w−η⋅[2x(wx+b−y)+λd∣w∣dw]={w−η⋅[2x(wx+b−y)+λ]w>0w−η⋅[2x(wx+b−y)−λ]w<0\\begin{align",{"_index":1740,"t":{"340":{"position":[[1493,93]]}}}],["w−η∂l2∂w=w−η⋅[2x(wx+b−y)+2λw]\\begin{align",{"_index":1751,"t":{"340":{"position":[[1969,45]]}}}],["w−η∂l∂w=w−η⋅[2x(wx+b−y)]\\begin{align",{"_index
":1733,"t":{"340":{"position":[[1301,40]]}}}],["w−η∂l∂w\\begin{aligned}w_{\\text",{"_index":1711,"t":{"340":{"position":[[741,31]]}}}],["w−η∂w∂l",{"_index":1716,"t":{"340":{"position":[[835,11]]}}}],["w−η∂w∂l1​​=w−η⋅[2x(wx+b−y)+λdwd∣w∣​]={w−η⋅[2x(wx+b−y)+λ]w−η⋅[2x(wx+b−y)−λ]​w>0w<0",{"_index":1749,"t":{"340":{"position":[[1867,86]]}}}],["w−η∂w∂l2​​=w−η⋅[2x(wx+b−y)+2λw",{"_index":1754,"t":{"340":{"position":[[2137,35]]}}}],["w−η∂w∂l​=w−η⋅[2x(wx+b−i",{"_index":1738,"t":{"340":{"position":[[1448,29]]}}}],["w∥12​≤θ(1",{"_index":1842,"t":{"362":{"position":[[142,11]]}}}],["w∥12≤θ(1)min",{"_index":1834,"t":{"362":{"position":[[22,13]]}}}],["w∥1=∣w1∣+∣w2∣+…+∣wn∣\\|\\mathbf{w}\\|_{1}=\\left|w_{1}\\right|+\\left|w_{2}\\right|+\\ldots+\\left|w_{n}\\right|∥w∥1​=∣w1​∣+∣w2​∣+…+∣wn",{"_index":1677,"t":{"336":{"position":[[70,128]]}}}],["w∥2=(∣w1∣2+∣w2∣2+…+∣wn∣2)12\\|\\mathbf{w}\\|_{2}=\\left(\\left|w_{1}\\right|^{2}+\\left|w_{2}\\right|^{2}+\\ldots+\\left|w_{n}\\right|^{2}\\right)^{\\frac{1}{2}}∥w∥2​=(∣w1​∣2+∣w2​∣2+…+∣wn​∣2)21",{"_index":1679,"t":{"336":{"position":[[249,182]]}}}],["w∥p=(∣w1∣p+∣w2∣p+…+∣wn∣p)1p\\|\\mathbf{w}\\|_{p}=\\left(\\left|w_{1}\\right|^{p}+\\left|w_{2}\\right|^{p}+\\ldots+\\left|w_{n}\\right|^{p}\\right)^{\\frac{1}{p}}∥w∥p​=(∣w1​∣p+∣w2​∣p+…+∣wn​∣p)p1",{"_index":1680,"t":{"336":{"position":[[439,182]]}}}],["w。就如公式15和16",{"_index":1773,"t":{"344":{"position":[[364,19]]}}}],["w上,从而使其较少为负。因此,这具有将w推向0",{"_index":1793,"t":{"348":{"position":[[66,31]]}}}],["w为bbox的宽高,c为该bbox是否存在object",{"_index":561,"t":{"149":{"position":[[337,30]]}}}],["w和b",{"_index":1775,"t":{"344":{"position":[[408,5]]}}}],["w归约为文法开始符号",{"_index":206,"t":{"75":{"position":[[103,31]]}}}],["w推向0如何有助于l1正则化中的过拟合?如上所述,随着w变为0,我们正在通过降低变量的重要性来减少功能的数量。在上面的方程式中,我们看到x_2,x_4和x_5",{"_index":1805,"t":{"348":{"position":[[418,91]]}}}],["w更小。相反,在等式3.2中,如果w",{"_index":1792,"t":{"348":{"position":[[39,24]]}}}],["w的符号就可以实现l1",{"_index":1789,"t":{"346":{"position":[[236,36]]}}}],["x",{"_index":6,"t":{"3":{"position":[[74,3],[328,1],[480,1],[488,1],[492,1],[616,1],[628,1],[732,1]]},"5":{"position":[[66,3],[320,1],[472,1],[480,1],[484,1],[608,1],[620,1],[724,1]]},"19":{"position":[[74,3],[328,1],[480,1],[488,1],[492,1],[616,1],[628,1],[732,1]]},"21":{"position":[[66,3],[320,1],[472,1],[480,1],[484,1],[608,1],[620,1],[724,1]]},"195":{"position":[[100,1],[124,2],[147,1],[155,1]]},"197":{"position":[[91,2],[114,1],[131,1]]},"226":{"position":[[41,3],[69,5],[84,3]]},"236":{"position":[[4,2]]},"248":{"position":[[525,8],[1534,2],[1550,2]]},"250":{"position":[[290,8],[317,8]]},"299":{"position":[[118,3]]},"305":{"position":[[241,12],[274,10]]},"325":{"position":[[43,3],[305,1]]},"327":{"position":[[42,1],[50,1],[54,1],[178,1],[190,1],[294,1]]},"340":{"position":[[847,1]]},"344":{"position":[[199,3]]},"383":{"position":[[382,3],[873,3],[1092,2],[1135,1],[1157,1],[1162,2],[1171,1],[1917,3],[1986,2]]}}}],["x(w",{"_index":1736,"t":{"340":{"position":[[1418,3],[1672,3],[1758,3],[1802,3],[2095,3]]},"342":{"position":[[65,3]]}}}],["x)%mod",{"_index":716,"t":{"226":{"position":[[115,9]]}}}],["x)=11+e−x(1)\\sigma(x",{"_index":3,"t":{"3":{"position":[[31,23]]},"5":{"position":[[23,23]]},"19":{"position":[[31,23]]},"21":{"position":[[23,23]]},"325":{"position":[[0,23]]}}}],["x)>(i",{"_index":713,"t":{"226":{"position":[[28,10]]}}}],["x+b",{"_index":1695,"t":{"340":{"position":[[182,3],[322,3],[621,3],[1422,3],[1676,3],[1762,3],[1806,3],[2099,3]]},"342":{"position":[[69,3]]}}}],["x+by^​=wx+b",{"_index":1688,"t":{"338":{"position":[[71,11]]}}}],["x.reshap",
{"_index":1920,"t":{"383":{"position":[[393,10]]}}}],["x.shape[0",{"_index":2001,"t":{"383":{"position":[[2099,11]]}}}],["x.to(devic",{"_index":1954,"t":{"383":{"position":[[1139,13],[1175,12],[1993,13]]}}}],["x86",{"_index":210,"t":{"85":{"position":[[89,12]]},"95":{"position":[[835,5]]}}}],["x86/elf",{"_index":263,"t":{"95":{"position":[[854,7]]}}}],["x86_64与i386",{"_index":247,"t":{"95":{"position":[[561,16]]}}}],["x_{1",{"_index":1796,"t":{"348":{"position":[[272,5]]}}}],["x_{1}+w_{2",{"_index":1684,"t":{"336":{"position":[[747,11]]}}}],["x_{2}+0.3251",{"_index":1798,"t":{"348":{"position":[[285,12]]}}}],["x_{2}+\\ldots+w_{n",{"_index":1685,"t":{"336":{"position":[[759,18]]}}}],["x_{3}+0.0009",{"_index":1799,"t":{"348":{"position":[[298,12]]}}}],["x_{4}+0.0001",{"_index":1800,"t":{"348":{"position":[[311,12]]}}}],["x_{5",{"_index":1801,"t":{"348":{"position":[[324,5]]}}}],["x_{6",{"_index":1803,"t":{"348":{"position":[[337,5]]}}}],["x_{n}+by^​=w1​x1​+w2​x2​+…+wn​xn​+b",{"_index":1686,"t":{"336":{"position":[[778,35]]}}}],["xi",{"_index":2051,"t":{"402":{"position":[[621,2]]}}}],["xlim=[1",{"_index":1981,"t":{"383":{"position":[[1714,8]]}}}],["xor异或等非线性问题,导致第一次ai",{"_index":2056,"t":{"404":{"position":[[43,35]]}}}],["xxx.plasmoid",{"_index":2140,"t":{"439":{"position":[[349,12]]}}}],["xxx的可能取值为x=x1,x2,...,xnx=x_1,x_2,...,x_nx=x1​,x2​,...,xn​,而取值事件xix_ixi​发生的概率为pip_ipi",{"_index":1607,"t":{"332":{"position":[[152,106]]}}}],["xxx,则在第一个任务完成后,每隔xxx",{"_index":344,"t":{"103":{"position":[[0,55]]}}}],["x为100时,sigmoid(x)就接近于0",{"_index":25,"t":{"3":{"position":[[377,24]]},"5":{"position":[[369,24]]},"19":{"position":[[377,24]]},"21":{"position":[[369,24]]},"325":{"position":[[354,24]]}}}],["x和y)。仅根据公式中的模型和数据更新权重会导致过拟合,从而导致模型泛化性不好。另一方面,在等式15,16中,w",{"_index":1776,"t":{"344":{"position":[[418,91]]}}}],["x,输出所有x",{"_index":1554,"t":{"305":{"position":[[174,13]]}}}],["y",{"_index":54,"t":{"7":{"position":[[196,1]]},"23":{"position":[[196,1]]},"149":{"position":[[301,2]]},"195":{"position":[[111,1],[134,1],[138,1],[175,2]]},"197":{"position":[[101,1],[105,1]]},"226":{"position":[[47,4],[77,4],[90,4]]},"330":{"position":[[172,1]]},"340":{"position":[[1426,3],[1810,2]]},"344":{"position":[[203,144]]},"383":{"position":[[1095,1],[1188,1],[1237,3],[1317,3],[1361,2],[1921,2],[1989,1],[2051,2],[2127,3]]}}}],["y)+2",{"_index":1753,"t":{"340":{"position":[[2103,4]]}}}],["y)+\\lambda",{"_index":1743,"t":{"340":{"position":[[1680,10],[1766,11]]}}}],["y)^{2",{"_index":1694,"t":{"340":{"position":[[167,6],[186,6]]}}}],["y)^{2}+\\lambda",{"_index":1707,"t":{"340":{"position":[[625,14]]}}}],["y)^{2}+\\lambda|w|l1​=(wx+b−y)2+λ∣w",{"_index":1698,"t":{"340":{"position":[[326,35]]}}}],["y)h=2x(wx+b−i",{"_index":1757,"t":{"342":{"position":[[73,14]]}}}],["y*z",{"_index":2045,"t":{"402":{"position":[[401,4]]}}}],["y.numel",{"_index":1957,"t":{"383":{"position":[[1241,10],[2131,10]]}}}],["y.to(devic",{"_index":1955,"t":{"383":{"position":[[1192,12],[2007,12]]}}}],["y2",{"_index":2091,"t":{"426":{"position":[[192,5]]}}}],["y^=0.4561x1−0.0007x2+0.3251x3+0.0009x4+0.0001x5−0.9142x6−0.553\\hat{y}=0.4561",{"_index":1795,"t":{"348":{"position":[[195,76]]}}}],["y^=w1x1+w2x2+…+wnxn+b\\hat{y}=w_{1",{"_index":1683,"t":{"336":{"position":[[712,34]]}}}],["y^=wx+b\\hat{y}=w",{"_index":1687,"t":{"338":{"position":[[54,16]]}}}],["y_at",{"_index":1779,"t":{"344":{"position":[[782,7]]}}}],["y_hat",{"_index":1995,"t":{"383":{"position":[[2020,5]]}}}],["yay",{"_index":233,"t":{"93":{"position":[[87,3]]},"444":{"position":[[28,3]]}}}],["ye"
,{"_index":1529,"t":{"291":{"position":[[282,6]]}}}],["yi",{"_index":2049,"t":{"402":{"position":[[590,2],[616,2],[636,2]]}}}],["yi,j=∑a,bva,b∗xi+a,j+b=∑a=−δδ∑b=−δδva,b∗xia,j+b(4)y_{i,j}=\\sum_{a,b}{v_{a,b}*x_{i+a,j+b}}=\\sum_{a",{"_index":1503,"t":{"287":{"position":[[32,98]]}}}],["yi,j=∑a,bvi,j,a,b∗xi+a,j+b=∑a,bva,b∗xi+a,j+b(3)y_{i,j}=\\sum_{a,b}{v_{i,j,a,b}*x_{i+a,j+b}}=\\sum_{a,b}{v_{a,b}*x_{i+a,j+b",{"_index":1499,"t":{"285":{"position":[[513,122]]}}}],["yi,j=∑h,wwi,j,h,w∗xh,w(1)y_{i,j}=\\sum_{h,w}{w_{i,j,h,w}*x_{h,w",{"_index":1493,"t":{"285":{"position":[[77,64]]}}}],["yi,j=∑h,wwi,j,h,w∗xh,w=∑a,bvi,j,a,b∗xi+a,j+b(2)y_{i,j}=\\sum_{h,w}{w_{i,j,h,w}*x_{h,w}}=\\sum_{a,b}{v_{i,j,a,b}*x_{i+a,j+b",{"_index":1496,"t":{"285":{"position":[[241,122]]}}}],["yolov1",{"_index":546,"t":{"149":{"position":[[42,14],[57,14],[521,25]]}}}],["yolov2引入了anchor机制代替bbox,将图像划分为13×1313",{"_index":575,"t":{"151":{"position":[[30,37]]}}}],["yolov5使用cspnet实现特征融合,csp",{"_index":581,"t":{"153":{"position":[[9,52]]}}}],["yolo损失函数分为分类损失以及回归损失,可以在分类损失中引入foc",{"_index":600,"t":{"165":{"position":[[50,36]]}}}],["yyy是样本的真实标签,zzz",{"_index":2047,"t":{"402":{"position":[[435,22]]}}}],["y−f(x)∥2=∑[y−f(x)]22(2)\\vert",{"_index":53,"t":{"7":{"position":[[166,29]]},"23":{"position":[[166,29]]}}}],["y−f(x)∥2=∑[y−f(x)]22(6)\\vert",{"_index":1596,"t":{"330":{"position":[[142,29]]}}}],["y为bbox左上角坐标,h",{"_index":560,"t":{"149":{"position":[[322,14]]}}}],["y的大部分将由y_hat",{"_index":1777,"t":{"344":{"position":[[712,16]]}}}],["y轴数据、x轴数据,win参数是窗口的唯一标识,opt可选字典中可以给出窗口的title和legend",{"_index":2077,"t":{"424":{"position":[[49,66]]}}}],["z=w∗x+bz=w*x+bz=w∗x+b",{"_index":2042,"t":{"402":{"position":[[217,60]]}}}],["zi",{"_index":2050,"t":{"402":{"position":[[595,2]]}}}],["zip",{"_index":133,"t":{"11":{"position":[[1007,5],[1013,43]]},"13":{"position":[[997,5],[1003,43]]},"397":{"position":[[993,5],[999,39]]}}}],["zip(a",{"_index":137,"t":{"11":{"position":[[1117,6],[1311,6]]},"13":{"position":[[1107,6],[1301,6]]},"397":{"position":[[1099,6],[1293,6]]}}}],["zsh的配置文件:~/.zshrc",{"_index":2104,"t":{"432":{"position":[[37,17]]}}}],["zzz带入阈值函数,如符号函数sign(z)sign(z)sign(z",{"_index":2043,"t":{"402":{"position":[[278,56]]}}}]],"pipeline":["stemmer"]}}] \ No newline at end of file +[{"documents":[{"i":1,"t":"激活函数与Loss的梯度","u":"/blog/激活函数与Loss的梯度","b":[]},{"i":7,"t":"","u":"/blog/archive","b":[]},{"i":8,"t":"理论基础","u":"/blog/理论知识","b":[]},{"i":10,"t":"基础数学知识","u":"/blog/数学基础","b":[]},{"i":14,"t":"激活函数与Loss的梯度","u":"/blog/deep_learning/激活函数与Loss的梯度","b":[]},{"i":20,"t":"PyTorch基础","u":"/blog/PyTroch基础","b":[]},{"i":24,"t":"GeekOS project 0的实现","u":"/docs/课程学习/操作系统课设/GeekOS project 0","b":["操作系统课设"]},{"i":30,"t":"编译原理笔记","u":"/docs/课程学习/编译原理/编译原理复习笔记","b":["编译原理"]},{"i":88,"t":"Linux系统下GeekOS的环境配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","b":["操作系统课设"]},{"i":102,"t":"Transformer and self-attention","u":"/docs/课程学习/计算机图形学/Transformer and 
self-attention","b":["计算机图形学"]},{"i":105,"t":"体系结构复习笔记","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","b":["计算机体系结构"]},{"i":113,"t":"Welcome","u":"/docs/课程学习/intro","b":["课程学习"]},{"i":117,"t":"鸣谢","u":"/docs/鸣谢/intro","b":["饮水思源"]},{"i":119,"t":"大数除法","u":"/docs/推免/机试/大数除法","b":["机试"]},{"i":127,"t":"简历面试准备","u":"/docs/推免/简历/简历面试准备","b":["简历"]},{"i":169,"t":"数据结构","u":"/docs/推免/计算机基础综合/数据结构","b":["计算机基础综合"]},{"i":174,"t":"线性代数","u":"/docs/推免/数学/线性代数","b":["数学"]},{"i":180,"t":"概率论","u":"/docs/推免/数学/概率论","b":["数学"]},{"i":183,"t":"夏令营面试数学部分复习","u":"/docs/推免/数学/夏令营面试数学部分复习","b":["数学"]},{"i":188,"t":"Welcome","u":"/docs/推免/intro","b":["推免"]},{"i":192,"t":"反序输出","u":"/docs/Algorithms/题解/反序输出","b":["题解"]},{"i":198,"t":"一维前缀和(刷出一道墙)","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","b":["题解"]},{"i":204,"t":"排列组合(求30的倍数)","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","b":["题解"]},{"i":210,"t":"STL模板","u":"/docs/Algorithms/STL模板","b":[]},{"i":232,"t":"Welcome","u":"/docs/Algorithms/intro","b":["算法"]},{"i":236,"t":"池化层","u":"/docs/Deep Learning/基础知识/池化层","b":["基础知识"]},{"i":243,"t":"机试技巧与STL","u":"/docs/Algorithms/机试技巧与STL","b":[]},{"i":310,"t":"从全连接到卷积","u":"/docs/Deep Learning/基础知识/从全连接到卷积","b":["基础知识"]},{"i":317,"t":"激活函数与Loss的梯度","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","b":["基础知识"]},{"i":328,"t":"卷积层","u":"/docs/Deep Learning/基础知识/卷积层","b":["基础知识"]},{"i":333,"t":"深度可分离卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","b":["基础知识"]},{"i":341,"t":"K-fold cross-validation","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","b":["基础知识"]},{"i":348,"t":"对于正则化的理解","u":"/docs/Deep Learning/基础知识/对于正则化的理解","b":["基础知识"]},{"i":364,"t":"关于Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","b":["基础知识"]},{"i":373,"t":"正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","b":["基础知识"]},{"i":381,"t":"AlexNet","u":"/docs/Deep Learning/经典模型/AlexNet","b":["经典模型"]},{"i":388,"t":"LeNet","u":"/docs/Deep Learning/经典模型/LeNet","b":["经典模型"]},{"i":395,"t":"Attention Is All You Need","u":"/docs/Deep Learning/论文笔记/Attention Is All You Need","b":["论文笔记"]},{"i":396,"t":"Perceptron","u":"/docs/Deep Learning/经典模型/Perceptron","b":["经典模型"]},{"i":403,"t":"PyTorch基础","u":"/docs/Deep Learning/基础知识/PyTroch基础","b":["基础知识"]},{"i":406,"t":"Self-Attention","u":"/docs/Deep Learning/论文笔记/Self-Attention","b":["论文笔记"]},{"i":412,"t":"Welcome","u":"/docs/Deep Learning/intro","b":["深度学习"]},{"i":416,"t":"Visdom可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","b":["实用技巧"]},{"i":427,"t":"终端代理","u":"/docs/Linux/实用工具/终端代理","b":["实用工具"]},{"i":435,"t":"如何让你的Kde 
Plasma看起来更像macOS","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","b":["客制化"]},{"i":440,"t":"挂载Windows磁盘为只读文件","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","b":["问题解决"]},{"i":445,"t":"Welcome","u":"/docs/Linux/intro","b":["Linux"]},{"i":449,"t":"告示栏","u":"/docs/Others/博客搭建/告示栏","b":["博客搭建"]},{"i":451,"t":"要准备的问题","u":"/docs/Others/面试/要准备的问题","b":["面试"]},{"i":459,"t":"Welcome","u":"/docs/Others/intro","b":["其他"]}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/1",[0,2.875]],["t/7",[]],["t/8",[1,0.878]],["t/10",[1,0.878]],["t/14",[0,2.875]],["t/20",[2,3.236]],["t/24",[3,2.185,4,2.185,5,2.185]],["t/30",[1,0.878]],["t/88",[6,3.784]],["t/102",[7,2.185,8,1.869,9,1.66]],["t/105",[1,0.878]],["t/113",[10,2.211]],["t/117",[1,0.878]],["t/119",[1,0.878]],["t/127",[1,0.878]],["t/169",[1,0.878]],["t/174",[1,0.878]],["t/180",[1,0.878]],["t/183",[1,0.878]],["t/188",[10,2.211]],["t/192",[1,0.878]],["t/198",[1,0.878]],["t/204",[11,3.784]],["t/210",[12,3.236]],["t/232",[10,2.211]],["t/236",[1,0.878]],["t/243",[12,3.236]],["t/310",[1,0.878]],["t/317",[0,2.875]],["t/328",[1,0.878]],["t/333",[1,0.878]],["t/341",[13,1.804,14,1.804,15,1.804,16,1.804]],["t/348",[1,0.878]],["t/364",[17,2.771,18,2.771]],["t/373",[1,0.878]],["t/381",[19,3.784]],["t/388",[20,3.784]],["t/395",[9,2.105,21,2.771]],["t/396",[22,3.784]],["t/403",[2,3.236]],["t/406",[8,2.369,9,2.105]],["t/412",[10,2.211]],["t/416",[23,3.784]],["t/427",[1,0.878]],["t/435",[24,2.771,25,2.771]],["t/440",[26,3.784]],["t/445",[10,2.211]],["t/449",[1,0.878]],["t/451",[1,0.878]],["t/459",[10,2.211]]],"invertedIndex":[["",{"_index":1,"t":{"8":{"position":[[0,4]]},"10":{"position":[[0,6]]},"30":{"position":[[0,6]]},"105":{"position":[[0,8]]},"117":{"position":[[0,2]]},"119":{"position":[[0,4]]},"127":{"position":[[0,6]]},"169":{"position":[[0,4]]},"174":{"position":[[0,4]]},"180":{"position":[[0,3]]},"183":{"position":[[0,11]]},"192":{"position":[[0,4]]},"198":{"position":[[0,12]]},"236":{"position":[[0,3]]},"310":{"position":[[0,7]]},"328":{"position":[[0,3]]},"333":{"position":[[0,7]]},"348":{"position":[[0,8]]},"373":{"position":[[0,8]]},"427":{"position":[[0,4]]},"449":{"position":[[0,3]]},"451":{"position":[[0,6]]}}}],["0",{"_index":5,"t":{"24":{"position":[[15,4]]}}}],["30",{"_index":11,"t":{"204":{"position":[[0,12]]}}}],["alexnet",{"_index":19,"t":{"381":{"position":[[0,7]]}}}],["attent",{"_index":9,"t":{"102":{"position":[[21,9]]},"395":{"position":[[0,9]]},"406":{"position":[[5,9]]}}}],["cross",{"_index":15,"t":{"341":{"position":[[7,5]]}}}],["fold",{"_index":14,"t":{"341":{"position":[[2,4]]}}}],["geeko",{"_index":3,"t":{"24":{"position":[[0,6]]}}}],["k",{"_index":13,"t":{"341":{"position":[[0,1]]}}}],["kde",{"_index":24,"t":{"435":{"position":[[0,8]]}}}],["lenet",{"_index":20,"t":{"388":{"position":[[0,5]]}}}],["linux系统下geeko",{"_index":6,"t":{"88":{"position":[[0,19]]}}}],["logist",{"_index":17,"t":{"364":{"position":[[0,10]]}}}],["loss",{"_index":0,"t":{"1":{"position":[[0,12]]},"14":{"position":[[0,12]]},"317":{"position":[[0,12]]}}}],["need",{"_index":21,"t":{"395":{"position":[[21,4]]}}}],["perceptron",{"_index":22,"t":{"396":{"position":[[0,10]]}}}],["plasma看起来更像maco",{"_index":25,"t":{"435":{"position":[[9,16]]}}}],["project",{"_index":4,"t":{"24":{"position":[[7,7]]}}}],["pytorch",{"_index":2,"t":{"20":{"position":[[0,9]]},"403":{"position":[[0,9]]}}}],["regress",{"_index":18,"t":{"364":{"position":[[11,10]]}}}],["self",{"_index":8,"t":{"102":{"position":[[16,4]]},"406":{"position":[[0,4]]}}}],["stl",{"_index":12,"t":{"210":{"position":[[0,5
]]},"243":{"position":[[0,8]]}}}],["transform",{"_index":7,"t":{"102":{"position":[[0,11]]}}}],["valid",{"_index":16,"t":{"341":{"position":[[13,10]]}}}],["visdom",{"_index":23,"t":{"416":{"position":[[0,9]]}}}],["welcom",{"_index":10,"t":{"113":{"position":[[0,7]]},"188":{"position":[[0,7]]},"232":{"position":[[0,7]]},"412":{"position":[[0,7]]},"445":{"position":[[0,7]]},"459":{"position":[[0,7]]}}}],["window",{"_index":26,"t":{"440":{"position":[[0,16]]}}}]],"pipeline":["stemmer"]}},{"documents":[{"i":3,"t":"一、激活函数","u":"/blog/激活函数与Loss的梯度","h":"#一激活函数","p":1},{"i":5,"t":"二、损失函数","u":"/blog/激活函数与Loss的梯度","h":"#二损失函数","p":1},{"i":12,"t":"矩阵 / 向量的内积和外积","u":"/blog/数学基础","h":"#矩阵--向量的内积和外积","p":10},{"i":16,"t":"一、激活函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#一激活函数","p":14},{"i":18,"t":"二、损失函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#二损失函数","p":14},{"i":22,"t":"一、常用函数部分","u":"/blog/PyTroch基础","h":"#一常用函数部分","p":20},{"i":26,"t":"1. 编写C语言代码","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#1-编写c语言代码","p":24},{"i":28,"t":"2. 使用Linux的编译系统对C语言代码进行编译","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#2-使用linux的编译系统对c语言代码进行编译","p":24},{"i":31,"t":"第一章:前言","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第一章前言","p":30},{"i":32,"t":"1.1 编译程序的逻辑结构","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#11-编译程序的逻辑结构","p":30},{"i":34,"t":"1.2 前端和后端","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#12-前端和后端","p":30},{"i":36,"t":"1.3 遍的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#13-遍的概念","p":30},{"i":38,"t":"第二章:文法和语言","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第二章文法和语言","p":30},{"i":39,"t":"2.1 句型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#21-句型","p":30},{"i":41,"t":"2.2 句子:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#22-句子","p":30},{"i":43,"t":"2.3 文法的分类:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#23-文法的分类","p":30},{"i":45,"t":"2.4 最左/右推导:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#24-最左右推导","p":30},{"i":47,"t":"第三章:词法分析","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第三章词法分析","p":30},{"i":48,"t":"3.1 正规文法转换成正规式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#31-正规文法转换成正规式","p":30},{"i":49,"t":"3.2 有穷自动机(FA)","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#32-有穷自动机fa","p":30},{"i":51,"t":"3.3 正规式RE与有穷自动机FA的互相转化","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#33-正规式re与有穷自动机fa的互相转化","p":30},{"i":52,"t":"3.4 正规文法RM与有穷自动机FA的互相转化","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#34-正规文法rm与有穷自动机fa的互相转化","p":30},{"i":53,"t":"第四章:自顶向下语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第四章自顶向下语法分析方法","p":30},{"i":55,"t":"1. FIRST集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-first集的定义","p":30},{"i":56,"t":"2. Follow集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-follow集的定义","p":30},{"i":58,"t":"3. SELECT集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-select集的定义","p":30},{"i":60,"t":"4. LL(1)文法的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-ll1文法的定义","p":30},{"i":61,"t":"5. LL(1)文法的判别","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-ll1文法的判别","p":30},{"i":63,"t":"6. 预测分析表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#6-预测分析表","p":30},{"i":65,"t":"7. 
非LL(1)文法到LL(1)文法的等价变换","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#7-非ll1文法到ll1文法的等价变换","p":30},{"i":67,"t":"第五章:自底向上语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第五章自底向上语法分析方法","p":30},{"i":68,"t":"5.1 概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#51-概念","p":30},{"i":70,"t":"5.2 方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#52-方法","p":30},{"i":72,"t":"5.3 工作过程","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#53-工作过程","p":30},{"i":73,"t":"5.4 移入-归约分析器的4种动作","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#54-移入-归约分析器的4种动作","p":30},{"i":75,"t":"5.5 重要题型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#55-重要题型","p":30},{"i":77,"t":"概念总结","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#概念总结","p":30},{"i":78,"t":"1 编译程序各阶段功能","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-编译程序各阶段功能","p":30},{"i":80,"t":"2 语法分析方法的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-语法分析方法的概念","p":30},{"i":82,"t":"3 翻译模式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-翻译模式","p":30},{"i":84,"t":"4 属性文法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-属性文法","p":30},{"i":86,"t":"5 符号表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-符号表","p":30},{"i":89,"t":"一、必须要知道的原理","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#一必须要知道的原理","p":88},{"i":90,"t":"1. GeekOS:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-geekos","p":88},{"i":92,"t":"2. bochs:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-bochs","p":88},{"i":94,"t":"3. 二者之间的关系","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#3-二者之间的关系","p":88},{"i":96,"t":"二、安装与配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#二安装与配置","p":88},{"i":98,"t":"1. 安装","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-安装","p":88},{"i":100,"t":"2. 配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-配置","p":88},{"i":103,"t":"一、模型概述","u":"/docs/课程学习/计算机图形学/Transformer and self-attention","h":"#一模型概述","p":102},{"i":104,"t":"二、CNN与self-attention","u":"/docs/课程学习/计算机图形学/Transformer and self-attention","h":"#二cnn与self-attention","p":102},{"i":106,"t":"一、基础知识","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#一基础知识","p":105},{"i":108,"t":"二、指令系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#二指令系统","p":105},{"i":109,"t":"三、存储系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#三存储系统","p":105},{"i":111,"t":"四、流水线","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#四流水线","p":105},{"i":115,"t":"支持我!","u":"/docs/课程学习/intro","h":"#支持我","p":113},{"i":121,"t":"思路","u":"/docs/推免/机试/大数除法","h":"#思路","p":119},{"i":123,"t":"参考代码","u":"/docs/推免/机试/大数除法","h":"#参考代码","p":119},{"i":125,"t":"扩展","u":"/docs/推免/机试/大数除法","h":"#扩展","p":119},{"i":128,"t":"一、U-2-Net","u":"/docs/推免/简历/简历面试准备","h":"#一u-2-net","p":127},{"i":129,"t":"(一)SOD任务","u":"/docs/推免/简历/简历面试准备","h":"#一sod任务","p":127},{"i":131,"t":"(二)网络结构","u":"/docs/推免/简历/简历面试准备","h":"#二网络结构","p":127},{"i":133,"t":"(三)损失函数","u":"/docs/推免/简历/简历面试准备","h":"#三损失函数","p":127},{"i":135,"t":"(四)深度可分离卷积","u":"/docs/推免/简历/简历面试准备","h":"#四深度可分离卷积","p":127},{"i":137,"t":"二、YOLO","u":"/docs/推免/简历/简历面试准备","h":"#二yolo","p":127},{"i":138,"t":"(一)mAP","u":"/docs/推免/简历/简历面试准备","h":"#一map","p":127},{"i":140,"t":"(二)YOLOv1","u":"/docs/推免/简历/简历面试准备","h":"#二yolov1","p":127},{"i":142,"t":"(二)YOLOv2","u":"/docs/推免/简历/简历面试准备","h":"#二yolov2","p":127},{"i":144,"t":"(三)YOLOv5","u":"/docs/推免/简历/简历面试准备","h":"#三yolov5","p":127},{"i":146,"t":"三、CBAM","u":"/docs/推免/简历/简历面试准备","h":"#三cbam","p":127},{"i":148,"t":"(一)总体结构","u":"/docs/推免/简历/简历面试准备","h":"#一总体结构","p":127},{"i":150,"t":"(二)通道注意力","u":"/docs/推免/简历/简历面试准备","h":"#二通道注意力","p":127},{"i":152,"t":"(三)空间注意力","u":"/docs/推免/简历/简历面试准备","h":"#三空间注意力","p":127},{"i":154,"t":"(四)其他注意事项","u":"/docs/推免/简历/简历面试准备","h":"#四其他注意事项","p":127},{"i":156,"t":"四、Focal 
Loss","u":"/docs/推免/简历/简历面试准备","h":"#四focal-loss","p":127},{"i":158,"t":"五、SENet","u":"/docs/推免/简历/简历面试准备","h":"#五senet","p":127},{"i":160,"t":"六、自注意力机制","u":"/docs/推免/简历/简历面试准备","h":"#六自注意力机制","p":127},{"i":162,"t":"七、自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#七自我介绍","p":127},{"i":163,"t":"(一)英文自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#一英文自我介绍","p":127},{"i":165,"t":"(二)西电广研院自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#二西电广研院自我介绍","p":127},{"i":167,"t":"(三)电子科技大学自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#三电子科技大学自我介绍","p":127},{"i":170,"t":"树","u":"/docs/推免/计算机基础综合/数据结构","h":"#树","p":169},{"i":172,"t":"图","u":"/docs/推免/计算机基础综合/数据结构","h":"#图","p":169},{"i":176,"t":"一、基础知识","u":"/docs/推免/数学/线性代数","h":"#一基础知识","p":174},{"i":178,"t":"二、面试常考问题","u":"/docs/推免/数学/线性代数","h":"#二面试常考问题","p":174},{"i":181,"t":"面试常考问题","u":"/docs/推免/数学/概率论","h":"#面试常考问题","p":180},{"i":184,"t":"一、线性代数","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#一线性代数","p":183},{"i":186,"t":"二、概率论","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#二概率论","p":183},{"i":190,"t":"支持我!","u":"/docs/推免/intro","h":"#支持我","p":188},{"i":194,"t":"参考代码","u":"/docs/Algorithms/题解/反序输出","h":"#参考代码","p":192},{"i":196,"t":"题解","u":"/docs/Algorithms/题解/反序输出","h":"#题解","p":192},{"i":200,"t":"参考代码","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#参考代码","p":198},{"i":202,"t":"题解","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#题解","p":198},{"i":206,"t":"参考代码","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#参考代码","p":204},{"i":208,"t":"题解","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#题解","p":204},{"i":212,"t":"vector","u":"/docs/Algorithms/STL模板","h":"#vector","p":210},{"i":214,"t":"pair","u":"/docs/Algorithms/STL模板","h":"#pair","p":210},{"i":216,"t":"string","u":"/docs/Algorithms/STL模板","h":"#string","p":210},{"i":218,"t":"query","u":"/docs/Algorithms/STL模板","h":"#query","p":210},{"i":220,"t":"priority_queue","u":"/docs/Algorithms/STL模板","h":"#priority_queue","p":210},{"i":222,"t":"stack","u":"/docs/Algorithms/STL模板","h":"#stack","p":210},{"i":224,"t":"deque","u":"/docs/Algorithms/STL模板","h":"#deque","p":210},{"i":226,"t":"set/multiset","u":"/docs/Algorithms/STL模板","h":"#setmultiset","p":210},{"i":228,"t":"map/multimap","u":"/docs/Algorithms/STL模板","h":"#mapmultimap","p":210},{"i":230,"t":"biset","u":"/docs/Algorithms/STL模板","h":"#biset","p":210},{"i":234,"t":"支持我!","u":"/docs/Algorithms/intro","h":"#支持我","p":232},{"i":237,"t":"一、卷积对像素位置信息是敏感的","u":"/docs/Deep Learning/基础知识/池化层","h":"#一卷积对像素位置信息是敏感的","p":236},{"i":239,"t":"二、池化层的作用","u":"/docs/Deep Learning/基础知识/池化层","h":"#二池化层的作用","p":236},{"i":241,"t":"三、池化的实现","u":"/docs/Deep Learning/基础知识/池化层","h":"#三池化的实现","p":236},{"i":245,"t":"vs2018 快捷键","u":"/docs/Algorithms/机试技巧与STL","h":"#vs2018-快捷键","p":243},{"i":247,"t":"头文件","u":"/docs/Algorithms/机试技巧与STL","h":"#头文件","p":243},{"i":248,"t":"标准c库","u":"/docs/Algorithms/机试技巧与STL","h":"#标准c库","p":243},{"i":250,"t":"c++ 
STL","u":"/docs/Algorithms/机试技巧与STL","h":"#c-stl","p":243},{"i":252,"t":"常用头","u":"/docs/Algorithms/机试技巧与STL","h":"#常用头","p":243},{"i":254,"t":"常用宏定义","u":"/docs/Algorithms/机试技巧与STL","h":"#常用宏定义","p":243},{"i":256,"t":"结构体","u":"/docs/Algorithms/机试技巧与STL","h":"#结构体","p":243},{"i":257,"t":"定义","u":"/docs/Algorithms/机试技巧与STL","h":"#定义","p":243},{"i":259,"t":"初始化","u":"/docs/Algorithms/机试技巧与STL","h":"#初始化","p":243},{"i":261,"t":"运算符重载","u":"/docs/Algorithms/机试技巧与STL","h":"#运算符重载","p":243},{"i":263,"t":"c++new的使用","u":"/docs/Algorithms/机试技巧与STL","h":"#cnew的使用","p":243},{"i":264,"t":"常规","u":"/docs/Algorithms/机试技巧与STL","h":"#常规","p":243},{"i":266,"t":"动态申请列大小固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请列大小固定的二维数组","p":243},{"i":268,"t":"动态申请大小不固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请大小不固定的二维数组","p":243},{"i":270,"t":"常用STL","u":"/docs/Algorithms/机试技巧与STL","h":"#常用stl","p":243},{"i":272,"t":"简述","u":"/docs/Algorithms/机试技巧与STL","h":"#简述","p":243},{"i":274,"t":"algorithm","u":"/docs/Algorithms/机试技巧与STL","h":"#algorithm","p":243},{"i":276,"t":"vector","u":"/docs/Algorithms/机试技巧与STL","h":"#vector","p":243},{"i":278,"t":"list","u":"/docs/Algorithms/机试技巧与STL","h":"#list","p":243},{"i":280,"t":"string","u":"/docs/Algorithms/机试技巧与STL","h":"#string","p":243},{"i":282,"t":"pair","u":"/docs/Algorithms/机试技巧与STL","h":"#pair","p":243},{"i":284,"t":"map","u":"/docs/Algorithms/机试技巧与STL","h":"#map","p":243},{"i":286,"t":"stack","u":"/docs/Algorithms/机试技巧与STL","h":"#stack","p":243},{"i":288,"t":"queue","u":"/docs/Algorithms/机试技巧与STL","h":"#queue","p":243},{"i":290,"t":"set","u":"/docs/Algorithms/机试技巧与STL","h":"#set","p":243},{"i":292,"t":"multiset","u":"/docs/Algorithms/机试技巧与STL","h":"#multiset","p":243},{"i":294,"t":"bitset","u":"/docs/Algorithms/机试技巧与STL","h":"#bitset","p":243},{"i":296,"t":"图模板","u":"/docs/Algorithms/机试技巧与STL","h":"#图模板","p":243},{"i":297,"t":"不带出入度的最简模板","u":"/docs/Algorithms/机试技巧与STL","h":"#不带出入度的最简模板","p":243},{"i":299,"t":"带出入度的 (2019推免试题)","u":"/docs/Algorithms/机试技巧与STL","h":"#带出入度的-2019推免试题","p":243},{"i":301,"t":"图算法:找出u到v的所有路径-邻接表","u":"/docs/Algorithms/机试技巧与STL","h":"#图算法找出u到v的所有路径-邻接表","p":243},{"i":303,"t":"树模板","u":"/docs/Algorithms/机试技巧与STL","h":"#树模板","p":243},{"i":304,"t":"注释版","u":"/docs/Algorithms/机试技巧与STL","h":"#注释版","p":243},{"i":306,"t":"简化版(Val As Index,若数据不在1~N内,则可能越界)","u":"/docs/Algorithms/机试技巧与STL","h":"#简化版val-as-index若数据不在1n内则可能越界","p":243},{"i":308,"t":"简化版(Val Not As Index,可以存任意的 Val)","u":"/docs/Algorithms/机试技巧与STL","h":"#简化版val-not-as-index可以存任意的-val","p":243},{"i":311,"t":"一、卷积的诞生&核心特征","u":"/docs/Deep Learning/基础知识/从全连接到卷积","h":"#一卷积的诞生核心特征","p":310},{"i":313,"t":"二、重新考察全连接层","u":"/docs/Deep Learning/基础知识/从全连接到卷积","h":"#二重新考察全连接层","p":310},{"i":315,"t":"三、总结","u":"/docs/Deep Learning/基础知识/从全连接到卷积","h":"#三总结","p":310},{"i":318,"t":"一、激活函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#一激活函数","p":317},{"i":319,"t":"1. Sigmoid函数 / Logistic函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-sigmoid函数--logistic函数","p":317},{"i":321,"t":"2. 线性整流单元(Rectified Linear Unit, ReLU)","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-线性整流单元rectified-linear-unit-relu","p":317},{"i":323,"t":"二、损失函数","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#二损失函数","p":317},{"i":324,"t":"1. Mean Squared Error 均方误差","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#1-mean-squared-error-均方误差","p":317},{"i":326,"t":"2. 
Cross Entropy Loss 交叉熵损失","u":"/docs/Deep Learning/基础知识/激活函数与Loss的梯度","h":"#2-cross-entropy-loss-交叉熵损失","p":317},{"i":329,"t":"一、1x1卷积","u":"/docs/Deep Learning/基础知识/卷积层","h":"#一1x1卷积","p":328},{"i":331,"t":"二、二维卷积层","u":"/docs/Deep Learning/基础知识/卷积层","h":"#二二维卷积层","p":328},{"i":335,"t":"常规卷积","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#常规卷积","p":333},{"i":337,"t":"(1)逐通道卷积-Depthwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#1逐通道卷积-depthwise-convolution","p":333},{"i":339,"t":"(2)逐点卷积-Pointwise Convolution","u":"/docs/Deep Learning/基础知识/深度可分离卷积","h":"#2逐点卷积-pointwise-convolution","p":333},{"i":342,"t":"What is k-fold cross-validation?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#what-is-k-fold-cross-validation","p":341},{"i":344,"t":"How does k-fold cross-validation work?","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#how-does-k-fold-cross-validation-work","p":341},{"i":346,"t":"Summary","u":"/docs/Deep Learning/基础知识/K-fold Cross-validation","h":"#summary","p":341},{"i":350,"t":"L1和L2是什么?","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1和l2是什么","p":348},{"i":352,"t":"Model","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#model","p":348},{"i":354,"t":"损失函数","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#损失函数","p":348},{"i":356,"t":"如何避免过拟合","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#如何避免过拟合","p":348},{"i":358,"t":"有正则化与没有正则化","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#有正则化与没有正则化","p":348},{"i":360,"t":"L1 vs L2","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1-vs-l2","p":348},{"i":362,"t":"L1的稀疏性","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1的稀疏性","p":348},{"i":365,"t":"一、什么是Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#一什么是logistic-regression","p":364},{"i":367,"t":"二、逻辑回归(Logistic Regression)和线性回归(Linear Regression)","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#二逻辑回归logistic-regression和线性回归linear-regression","p":364},{"i":369,"t":"三、逻辑回归到底是回归任务(Regression)还是分类任务(Classification)?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#三逻辑回归到底是回归任务regression还是分类任务classification","p":364},{"i":371,"t":"四、为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#四为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数","p":364},{"i":375,"t":"一、什么是正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#一什么是正则化","p":373},{"i":377,"t":"二、L1正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#二l1正则化","p":373},{"i":379,"t":"三、L2正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#三l2正则化与权重衰退","p":373},{"i":382,"t":"背景","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#背景","p":381},{"i":384,"t":"新的概念和技术","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#新的概念和技术","p":381},{"i":386,"t":"与LeNet比较","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#与lenet比较","p":381},{"i":389,"t":"背景","u":"/docs/Deep Learning/经典模型/LeNet","h":"#背景","p":388},{"i":391,"t":"代码实现","u":"/docs/Deep Learning/经典模型/LeNet","h":"#代码实现","p":388},{"i":393,"t":"问题","u":"/docs/Deep Learning/经典模型/LeNet","h":"#问题","p":388},{"i":397,"t":"一、什么是感知机","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#一什么是感知机","p":396},{"i":399,"t":"二、详细原理","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#二详细原理","p":396},{"i":401,"t":"三、总结","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#三总结","p":396},{"i":404,"t":"一、常用函数部分","u":"/docs/Deep Learning/基础知识/PyTroch基础","h":"#一常用函数部分","p":403},{"i":407,"t":"CNN的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#cnn的局限性","p":406},{"i":408,"t":"输入与输出的局限性","u":"/docs/Deep 
Learning/论文笔记/Self-Attention","h":"#输入与输出的局限性","p":406},{"i":410,"t":"关联上下文的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#关联上下文的局限性","p":406},{"i":414,"t":"支持我!","u":"/docs/Deep Learning/intro","h":"#支持我","p":412},{"i":418,"t":"一、安装Visdom","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#一安装visdom","p":416},{"i":420,"t":"二、Visdom的使用","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#二visdom的使用","p":416},{"i":421,"t":"0. Visdom的启动","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#0-visdom的启动","p":416},{"i":423,"t":"1. 单窗口单曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#1-单窗口单曲线的可视化","p":416},{"i":425,"t":"2. 单窗口多曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#2-单窗口多曲线的可视化","p":416},{"i":429,"t":"一、编写脚本","u":"/docs/Linux/实用工具/终端代理","h":"#一编写脚本","p":427},{"i":431,"t":"二、关联终端配置文件","u":"/docs/Linux/实用工具/终端代理","h":"#二关联终端配置文件","p":427},{"i":433,"t":"三、使用","u":"/docs/Linux/实用工具/终端代理","h":"#三使用","p":427},{"i":436,"t":"一、latte-dock","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#一latte-dock","p":435},{"i":438,"t":"二、Kde Plasmoids","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#二kde-plasmoids","p":435},{"i":441,"t":"一、发生原因","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#一发生原因","p":440},{"i":443,"t":"二、解决方案","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#二解决方案","p":440},{"i":447,"t":"支持我!","u":"/docs/Linux/intro","h":"#支持我","p":445},{"i":453,"t":"一、自我介绍部分","u":"/docs/Others/面试/要准备的问题","h":"#一自我介绍部分","p":451},{"i":455,"t":"二、专业课面试题","u":"/docs/Others/面试/要准备的问题","h":"#二专业课面试题","p":451},{"i":457,"t":"三、自由面试题","u":"/docs/Others/面试/要准备的问题","h":"#三自由面试题","p":451},{"i":461,"t":"支持我!","u":"/docs/Others/intro","h":"#支持我","p":459}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/3",[0,0.507]],["t/5",[0,0.507]],["t/12",[0,0.565]],["t/16",[0,0.507]],["t/18",[0,0.507]],["t/22",[0,0.507]],["t/26",[1,2.633,2,3.487]],["t/28",[3,2.548,4,4.211]],["t/31",[0,0.507]],["t/32",[0,0.381,5,4.211]],["t/34",[0,0.381,6,4.211]],["t/36",[0,0.381,7,4.211]],["t/38",[0,0.507]],["t/39",[0,0.381,8,4.211]],["t/41",[0,0.381,9,4.211]],["t/43",[0,0.381,10,4.211]],["t/45",[0,0.381,11,4.211]],["t/47",[0,0.507]],["t/48",[0,0.381,12,4.211]],["t/49",[13,4.211,14,4.211]],["t/51",[15,4.211,16,4.211]],["t/52",[17,4.211,18,4.211]],["t/53",[0,0.507]],["t/55",[1,2.633,19,4.211]],["t/56",[3,2.548,20,4.211]],["t/58",[21,3.487,22,4.211]],["t/60",[23,3.487,24,3.774]],["t/61",[24,3.774,25,3.774]],["t/63",[0,0.381,26,4.211]],["t/65",[27,4.211,28,4.211]],["t/67",[0,0.507]],["t/68",[0,0.381,29,4.211]],["t/70",[0,0.381,30,4.211]],["t/72",[0,0.381,31,4.211]],["t/73",[0,0.306,23,2.795,32,3.376]],["t/75",[0,0.381,33,4.211]],["t/77",[0,0.507]],["t/78",[0,0.381,1,2.633]],["t/80",[0,0.381,3,2.548]],["t/82",[0,0.381,21,3.487]],["t/84",[0,0.381,23,3.487]],["t/86",[0,0.381,25,3.774]],["t/89",[0,0.507]],["t/90",[1,2.633,34,4.211]],["t/92",[3,2.548,35,4.211]],["t/94",[0,0.381,21,3.487]],["t/96",[0,0.507]],["t/98",[0,0.381,1,2.633]],["t/100",[0,0.381,3,2.548]],["t/103",[0,0.507]],["t/104",[36,4.211,37,4.211]],["t/106",[0,0.507]],["t/108",[0,0.507]],["t/109",[0,0.507]],["t/111",[0,0.507]],["t/115",[0,0.507]],["t/121",[0,0.507]],["t/123",[0,0.507]],["t/125",[0,0.507]],["t/128",[3,2.042,38,3.376,39,3.376]],["t/129",[40,5.596]],["t/131",[0,0.507]],["t/133",[0,0.507]],["t/135",[0,0.507]],["t/137",[41,5.596]],["t/138",[42,5.016]],["t/140",[43,5.596]],["t/142",[44,5.596]],["t/144",[45,5.596]],["t/146",[46,5.596]],["t/148",[0,0.507]],["t/150",[0,0.507]],["t/152",[0,0.507]],["t/154",[0,0.507]],["t/156",[47,4.211,48,3.774]],["t/158",[49,5.596]],
["t/160",[0,0.507]],["t/162",[0,0.507]],["t/163",[0,0.507]],["t/165",[0,0.507]],["t/167",[0,0.507]],["t/170",[0,0.507]],["t/172",[0,0.507]],["t/176",[0,0.507]],["t/178",[0,0.507]],["t/181",[0,0.507]],["t/184",[0,0.507]],["t/186",[0,0.507]],["t/190",[0,0.507]],["t/194",[0,0.507]],["t/196",[0,0.507]],["t/200",[0,0.507]],["t/202",[0,0.507]],["t/206",[0,0.507]],["t/208",[0,0.507]],["t/212",[50,5.016]],["t/214",[51,5.016]],["t/216",[52,5.016]],["t/218",[53,5.596]],["t/220",[54,5.596]],["t/222",[55,5.016]],["t/224",[56,5.596]],["t/226",[57,5.596]],["t/228",[58,5.596]],["t/230",[59,5.596]],["t/234",[0,0.507]],["t/237",[0,0.507]],["t/239",[0,0.507]],["t/241",[0,0.507]],["t/245",[0,0.381,60,4.211]],["t/247",[0,0.507]],["t/248",[2,4.634]],["t/250",[2,3.487,61,3.774]],["t/252",[0,0.507]],["t/254",[0,0.507]],["t/256",[0,0.507]],["t/257",[0,0.507]],["t/259",[0,0.507]],["t/261",[0,0.507]],["t/263",[62,5.596]],["t/264",[0,0.507]],["t/266",[0,0.507]],["t/268",[0,0.507]],["t/270",[61,5.016]],["t/272",[0,0.507]],["t/274",[63,5.596]],["t/276",[50,5.016]],["t/278",[64,5.596]],["t/280",[52,5.016]],["t/282",[51,5.016]],["t/284",[42,5.016]],["t/286",[55,5.016]],["t/288",[65,5.596]],["t/290",[66,5.596]],["t/292",[67,5.596]],["t/294",[68,5.596]],["t/296",[0,0.507]],["t/297",[0,0.507]],["t/299",[0,0.381,69,4.211]],["t/301",[0,0.381,70,4.211]],["t/303",[0,0.507]],["t/304",[0,0.507]],["t/306",[71,3.774,72,4.211]],["t/308",[71,4.614,73,3.376]],["t/311",[0,0.507]],["t/313",[0,0.507]],["t/315",[0,0.507]],["t/318",[0,0.507]],["t/319",[0,0.255,1,1.762,74,2.817,75,2.332]],["t/321",[3,1.462,76,2.417,77,2.417,78,2.417,79,2.417]],["t/323",[0,0.507]],["t/324",[0,0.219,1,1.511,80,2.417,81,2.417,82,2.417]],["t/326",[0,0.219,3,1.462,48,2.166,83,2.001,84,2.417]],["t/329",[85,5.596]],["t/331",[0,0.507]],["t/335",[0,0.507]],["t/337",[1,2.111,86,3.376,87,3.026]],["t/339",[3,2.042,87,3.026,88,3.376]],["t/342",[83,2.332,89,2.525,90,2.525,91,2.525]],["t/344",[83,2.001,89,2.166,90,2.166,91,2.166,92,2.417]],["t/346",[93,5.596]],["t/350",[94,5.596]],["t/352",[95,5.596]],["t/354",[0,0.507]],["t/356",[0,0.507]],["t/358",[0,0.507]],["t/360",[96,2.795,97,3.376,98,3.026]],["t/362",[96,4.634]],["t/365",[75,3.487,99,3.774]],["t/367",[75,2.795,99,3.026,100,3.376]],["t/369",[101,5.596]],["t/371",[0,0.507]],["t/375",[0,0.507]],["t/377",[96,4.634]],["t/379",[98,5.016]],["t/382",[0,0.507]],["t/384",[0,0.507]],["t/386",[102,5.596]],["t/389",[0,0.507]],["t/391",[0,0.507]],["t/393",[0,0.507]],["t/397",[0,0.507]],["t/399",[0,0.507]],["t/401",[0,0.507]],["t/404",[0,0.507]],["t/407",[103,5.596]],["t/408",[0,0.507]],["t/410",[0,0.507]],["t/414",[0,0.507]],["t/418",[104,4.634]],["t/420",[104,4.634]],["t/421",[104,3.487,105,4.211]],["t/423",[0,0.381,1,2.633]],["t/425",[0,0.381,3,2.548]],["t/429",[0,0.507]],["t/431",[0,0.507]],["t/433",[0,0.507]],["t/436",[106,4.211,107,4.211]],["t/438",[108,4.211,109,4.211]],["t/441",[0,0.507]],["t/443",[0,0.507]],["t/447",[0,0.507]],["t/453",[0,0.507]],["t/455",[0,0.507]],["t/457",[0,0.507]],["t/461",[0,0.507]]],"invertedIndex":[["",{"_index":0,"t":{"3":{"position":[[0,6]]},"5":{"position":[[0,6]]},"12":{"position":[[0,2],[3,1],[5,8]]},"16":{"position":[[0,6]]},"18":{"position":[[0,6]]},"22":{"position":[[0,8]]},"31":{"position":[[0,6]]},"32":{"position":[[4,9]]},"34":{"position":[[4,5]]},"36":{"position":[[4,4]]},"38":{"position":[[0,9]]},"39":{"position":[[4,2]]},"41":{"position":[[4,3]]},"43":{"position":[[4,6]]},"45":{"position":[[4,7]]},"47":{"position":[[0,8]]},"48":{"position":[[4,10]]},"53":{"position":[[0,14]]},"63":{
"position":[[3,5]]},"67":{"position":[[0,14]]},"68":{"position":[[4,2]]},"70":{"position":[[4,2]]},"72":{"position":[[4,4]]},"73":{"position":[[4,2]]},"75":{"position":[[4,4]]},"77":{"position":[[0,4]]},"78":{"position":[[2,9]]},"80":{"position":[[2,9]]},"82":{"position":[[2,4]]},"84":{"position":[[2,4]]},"86":{"position":[[2,3]]},"89":{"position":[[0,10]]},"94":{"position":[[3,7]]},"96":{"position":[[0,7]]},"98":{"position":[[3,2]]},"100":{"position":[[3,2]]},"103":{"position":[[0,6]]},"106":{"position":[[0,6]]},"108":{"position":[[0,6]]},"109":{"position":[[0,6]]},"111":{"position":[[0,5]]},"115":{"position":[[0,4]]},"121":{"position":[[0,2]]},"123":{"position":[[0,4]]},"125":{"position":[[0,2]]},"131":{"position":[[0,7]]},"133":{"position":[[0,7]]},"135":{"position":[[0,10]]},"148":{"position":[[0,7]]},"150":{"position":[[0,8]]},"152":{"position":[[0,8]]},"154":{"position":[[0,9]]},"160":{"position":[[0,8]]},"162":{"position":[[0,6]]},"163":{"position":[[0,9]]},"165":{"position":[[0,12]]},"167":{"position":[[0,13]]},"170":{"position":[[0,1]]},"172":{"position":[[0,1]]},"176":{"position":[[0,6]]},"178":{"position":[[0,8]]},"181":{"position":[[0,6]]},"184":{"position":[[0,6]]},"186":{"position":[[0,5]]},"190":{"position":[[0,4]]},"194":{"position":[[0,4]]},"196":{"position":[[0,2]]},"200":{"position":[[0,4]]},"202":{"position":[[0,2]]},"206":{"position":[[0,4]]},"208":{"position":[[0,2]]},"234":{"position":[[0,4]]},"237":{"position":[[0,15]]},"239":{"position":[[0,8]]},"241":{"position":[[0,7]]},"245":{"position":[[7,3]]},"247":{"position":[[0,3]]},"252":{"position":[[0,3]]},"254":{"position":[[0,5]]},"256":{"position":[[0,3]]},"257":{"position":[[0,2]]},"259":{"position":[[0,3]]},"261":{"position":[[0,5]]},"264":{"position":[[0,2]]},"266":{"position":[[0,14]]},"268":{"position":[[0,14]]},"272":{"position":[[0,2]]},"296":{"position":[[0,3]]},"297":{"position":[[0,10]]},"299":{"position":[[0,5]]},"301":{"position":[[15,3]]},"303":{"position":[[0,3]]},"304":{"position":[[0,3]]},"311":{"position":[[0,12]]},"313":{"position":[[0,10]]},"315":{"position":[[0,4]]},"318":{"position":[[0,6]]},"319":{"position":[[13,1]]},"323":{"position":[[0,6]]},"324":{"position":[[22,4]]},"326":{"position":[[22,5]]},"331":{"position":[[0,7]]},"335":{"position":[[0,4]]},"354":{"position":[[0,4]]},"356":{"position":[[0,7]]},"358":{"position":[[0,10]]},"371":{"position":[[0,31]]},"375":{"position":[[0,8]]},"382":{"position":[[0,2]]},"384":{"position":[[0,7]]},"389":{"position":[[0,2]]},"391":{"position":[[0,4]]},"393":{"position":[[0,2]]},"397":{"position":[[0,8]]},"399":{"position":[[0,6]]},"401":{"position":[[0,4]]},"404":{"position":[[0,8]]},"408":{"position":[[0,9]]},"410":{"position":[[0,9]]},"414":{"position":[[0,4]]},"423":{"position":[[3,10]]},"425":{"position":[[3,10]]},"429":{"position":[[0,6]]},"431":{"position":[[0,10]]},"433":{"position":[[0,4]]},"441":{"position":[[0,6]]},"443":{"position":[[0,6]]},"447":{"position":[[0,4]]},"453":{"position":[[0,8]]},"455":{"position":[[0,8]]},"457":{"position":[[0,7]]},"461":{"position":[[0,4]]}}}],["0",{"_index":105,"t":{"421":{"position":[[0,2]]}}}],["1",{"_index":1,"t":{"26":{"position":[[0,2]]},"55":{"position":[[0,2]]},"78":{"position":[[0,1]]},"90":{"position":[[0,2]]},"98":{"position":[[0,2]]},"319":{"position":[[0,2]]},"324":{"position":[[0,2]]},"337":{"position":[[0,8]]},"423":{"position":[[0,2]]}}}],["1.1",{"_index":5,"t":{"32":{"position":[[0,3]]}}}],["1.2",{"_index":6,"t":{"34":{"position":[[0,3]]}}}],["1.3",{"_index":7,"t":{"36":{"position":[[0,3]]}}}],[
"1x1",{"_index":85,"t":{"329":{"position":[[0,7]]}}}],["2",{"_index":3,"t":{"28":{"position":[[0,2]]},"56":{"position":[[0,2]]},"80":{"position":[[0,1]]},"92":{"position":[[0,2]]},"100":{"position":[[0,2]]},"128":{"position":[[4,1]]},"321":{"position":[[0,2]]},"326":{"position":[[0,2]]},"339":{"position":[[0,7]]},"425":{"position":[[0,2]]}}}],["2.1",{"_index":8,"t":{"39":{"position":[[0,3]]}}}],["2.2",{"_index":9,"t":{"41":{"position":[[0,3]]}}}],["2.3",{"_index":10,"t":{"43":{"position":[[0,3]]}}}],["2.4",{"_index":11,"t":{"45":{"position":[[0,3]]}}}],["2019",{"_index":69,"t":{"299":{"position":[[6,10]]}}}],["3",{"_index":21,"t":{"58":{"position":[[0,2]]},"82":{"position":[[0,1]]},"94":{"position":[[0,2]]}}}],["3.1",{"_index":12,"t":{"48":{"position":[[0,3]]}}}],["3.2",{"_index":13,"t":{"49":{"position":[[0,3]]}}}],["3.3",{"_index":15,"t":{"51":{"position":[[0,3]]}}}],["3.4",{"_index":17,"t":{"52":{"position":[[0,3]]}}}],["4",{"_index":23,"t":{"60":{"position":[[0,2]]},"73":{"position":[[7,10]]},"84":{"position":[[0,1]]}}}],["5",{"_index":25,"t":{"61":{"position":[[0,2]]},"86":{"position":[[0,1]]}}}],["5.1",{"_index":29,"t":{"68":{"position":[[0,3]]}}}],["5.2",{"_index":30,"t":{"70":{"position":[[0,3]]}}}],["5.3",{"_index":31,"t":{"72":{"position":[[0,3]]}}}],["5.4",{"_index":32,"t":{"73":{"position":[[0,3]]}}}],["5.5",{"_index":33,"t":{"75":{"position":[[0,3]]}}}],["6",{"_index":26,"t":{"63":{"position":[[0,2]]}}}],["7",{"_index":27,"t":{"65":{"position":[[0,2]]}}}],["algorithm",{"_index":63,"t":{"274":{"position":[[0,9]]}}}],["attent",{"_index":37,"t":{"104":{"position":[[11,9]]}}}],["biset",{"_index":59,"t":{"230":{"position":[[0,5]]}}}],["bitset",{"_index":68,"t":{"294":{"position":[[0,6]]}}}],["boch",{"_index":35,"t":{"92":{"position":[[3,6]]}}}],["c",{"_index":2,"t":{"26":{"position":[[3,7]]},"248":{"position":[[0,4]]},"250":{"position":[[0,3]]}}}],["c++new",{"_index":62,"t":{"263":{"position":[[0,9]]}}}],["cbam",{"_index":46,"t":{"146":{"position":[[0,6]]}}}],["cnn",{"_index":103,"t":{"407":{"position":[[0,7]]}}}],["cnn与self",{"_index":36,"t":{"104":{"position":[[0,10]]}}}],["convolut",{"_index":87,"t":{"337":{"position":[[19,11]]},"339":{"position":[[18,11]]}}}],["cross",{"_index":83,"t":{"326":{"position":[[3,5]]},"342":{"position":[[15,5]]},"344":{"position":[[16,5]]}}}],["depthwis",{"_index":86,"t":{"337":{"position":[[9,9]]}}}],["dequ",{"_index":56,"t":{"224":{"position":[[0,5]]}}}],["dock",{"_index":107,"t":{"436":{"position":[[8,4]]}}}],["entropi",{"_index":84,"t":{"326":{"position":[[9,7]]}}}],["error",{"_index":82,"t":{"324":{"position":[[16,5]]}}}],["fa",{"_index":14,"t":{"49":{"position":[[4,9]]}}}],["first",{"_index":19,"t":{"55":{"position":[[3,9]]}}}],["focal",{"_index":47,"t":{"156":{"position":[[0,7]]}}}],["fold",{"_index":90,"t":{"342":{"position":[[10,4]]},"344":{"position":[[11,4]]}}}],["follow",{"_index":20,"t":{"56":{"position":[[3,10]]}}}],["geeko",{"_index":34,"t":{"90":{"position":[[3,7]]}}}],["index",{"_index":73,"t":{"308":{"position":[[15,12]]}}}],["index,若数据不在1~n",{"_index":72,"t":{"306":{"position":[[11,22]]}}}],["k",{"_index":89,"t":{"342":{"position":[[8,1]]},"344":{"position":[[9,1]]}}}],["kde",{"_index":108,"t":{"438":{"position":[[0,5]]}}}],["l1",{"_index":96,"t":{"360":{"position":[[0,2]]},"362":{"position":[[0,6]]},"377":{"position":[[0,7]]}}}],["l1和l2",{"_index":94,"t":{"350":{"position":[[0,9]]}}}],["l2",{"_index":98,"t":{"360":{"position":[[6,2]]},"379":{"position":[[0,12]]}}}],["latt",{"_index":106,"t":{"436":{"position":[[0,7]]}}}],["lenet",{
"_index":102,"t":{"386":{"position":[[0,8]]}}}],["linear",{"_index":77,"t":{"321":{"position":[[20,6]]}}}],["linux的编译系统对c",{"_index":4,"t":{"28":{"position":[[3,22]]}}}],["list",{"_index":64,"t":{"278":{"position":[[0,4]]}}}],["ll(1",{"_index":24,"t":{"60":{"position":[[3,10]]},"61":{"position":[[3,10]]}}}],["ll(1)文法到ll(1",{"_index":28,"t":{"65":{"position":[[3,21]]}}}],["logist",{"_index":75,"t":{"319":{"position":[[15,10]]},"365":{"position":[[0,13]]},"367":{"position":[[0,15]]}}}],["loss",{"_index":48,"t":{"156":{"position":[[8,4]]},"326":{"position":[[17,4]]}}}],["map",{"_index":42,"t":{"138":{"position":[[0,6]]},"284":{"position":[[0,3]]}}}],["map/multimap",{"_index":58,"t":{"228":{"position":[[0,12]]}}}],["mean",{"_index":80,"t":{"324":{"position":[[3,4]]}}}],["model",{"_index":95,"t":{"352":{"position":[[0,5]]}}}],["multiset",{"_index":67,"t":{"292":{"position":[[0,8]]}}}],["net",{"_index":39,"t":{"128":{"position":[[6,3]]}}}],["pair",{"_index":51,"t":{"214":{"position":[[0,4]]},"282":{"position":[[0,4]]}}}],["plasmoid",{"_index":109,"t":{"438":{"position":[[6,9]]}}}],["pointwis",{"_index":88,"t":{"339":{"position":[[8,9]]}}}],["priority_queu",{"_index":54,"t":{"220":{"position":[[0,14]]}}}],["queri",{"_index":53,"t":{"218":{"position":[[0,5]]}}}],["queue",{"_index":65,"t":{"288":{"position":[[0,5]]}}}],["rectifi",{"_index":76,"t":{"321":{"position":[[3,16]]}}}],["regress",{"_index":99,"t":{"365":{"position":[[14,10]]},"367":{"position":[[40,11]]}}}],["regression)和线性回归(linear",{"_index":100,"t":{"367":{"position":[[16,23]]}}}],["regression)还是分类任务(classif",{"_index":101,"t":{"369":{"position":[[0,48]]}}}],["relu",{"_index":79,"t":{"321":{"position":[[33,5]]}}}],["re与有穷自动机fa",{"_index":16,"t":{"51":{"position":[[4,18]]}}}],["rm与有穷自动机fa",{"_index":18,"t":{"52":{"position":[[4,19]]}}}],["select",{"_index":22,"t":{"58":{"position":[[3,10]]}}}],["senet",{"_index":49,"t":{"158":{"position":[[0,7]]}}}],["set",{"_index":66,"t":{"290":{"position":[[0,3]]}}}],["set/multiset",{"_index":57,"t":{"226":{"position":[[0,12]]}}}],["sigmoid",{"_index":74,"t":{"319":{"position":[[3,9]]}}}],["sod",{"_index":40,"t":{"129":{"position":[[0,8]]}}}],["squar",{"_index":81,"t":{"324":{"position":[[8,7]]}}}],["stack",{"_index":55,"t":{"222":{"position":[[0,5]]},"286":{"position":[[0,5]]}}}],["stl",{"_index":61,"t":{"250":{"position":[[4,3]]},"270":{"position":[[0,5]]}}}],["string",{"_index":52,"t":{"216":{"position":[[0,6]]},"280":{"position":[[0,6]]}}}],["summari",{"_index":93,"t":{"346":{"position":[[0,7]]}}}],["u",{"_index":38,"t":{"128":{"position":[[0,3]]}}}],["unit",{"_index":78,"t":{"321":{"position":[[27,5]]}}}],["u到v",{"_index":70,"t":{"301":{"position":[[0,14]]}}}],["val",{"_index":71,"t":{"306":{"position":[[0,7]]},"308":{"position":[[0,7],[28,4]]}}}],["valid",{"_index":91,"t":{"342":{"position":[[21,11]]},"344":{"position":[[22,10]]}}}],["vector",{"_index":50,"t":{"212":{"position":[[0,6]]},"276":{"position":[[0,6]]}}}],["visdom",{"_index":104,"t":{"418":{"position":[[0,10]]},"420":{"position":[[0,11]]},"421":{"position":[[3,9]]}}}],["vs",{"_index":97,"t":{"360":{"position":[[3,2]]}}}],["vs2018",{"_index":60,"t":{"245":{"position":[[0,6]]}}}],["work",{"_index":92,"t":{"344":{"position":[[33,5]]}}}],["yolo",{"_index":41,"t":{"137":{"position":[[0,6]]}}}],["yolov1",{"_index":43,"t":{"140":{"position":[[0,9]]}}}],["yolov2",{"_index":44,"t":{"142":{"position":[[0,9]]}}}],["yolov5",{"_index":45,"t":{"144":{"position":[[0,9]]}}}]],"pipeline":["stemmer"]}},{"documents":[{"i":2,"t":"一、激活函数​ Sigmoid函数 / 
Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"激活函数与Loss的梯度","u":"/blog/激活函数与Loss的梯度","h":"","p":1},{"i":4,"t":"Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"一、激活函数","u":"/blog/激活函数与Loss的梯度","h":"#一激活函数","p":1},{"i":6,"t":"Mean Squared Error 均方误差 L2范数是对元素求平方和后再开根号,需要.pow(2)后才可作为损失函数 微小的误差可能对网络性能带来极大的影响 LossMSE=∑[y−f(x)]2(1)Loss_{MSE} = \\sum{[{y - f(x)]^2}} \\tag{1}LossMSE​=∑[y−f(x)]2(1) ∥y−f(x)∥2=∑[y−f(x)]22(2)\\Vert y - f(x) \\Vert_2 = \\sqrt[2]{\\sum{[y - f(x)]^2}} \\tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2) Cross Entropy Loss 交叉熵损失 binary 二分类问题 multi-class 多分类问题 经常与softmax激活函数搭配使用","s":"二、损失函数","u":"/blog/激活函数与Loss的梯度","h":"#二损失函数","p":1},{"i":9,"t":"梯度下降算法需要求整个数据集上的计算损失函数以及梯度,计算代价太大,因此常采用小批量随机梯度下降。在每个batch上计算损失函数以及梯度,近似损失。此时,batchsize越大,近似效果越好。 随机梯度下降的随机指的就是使用的数据是随机选择的mini batch数据,即Mini-Batch Gradient Descent。 然而,batchsize越小,收敛效果越好。随机梯度下降理论上带来了噪音,batchsize较小时带来的噪音较大,可以增加模型的鲁棒性。 前向传播(Forward Propagation):已知权重、偏置和输入,计算出损失函数 反向传播(Backward Propagation):求出损失函数对于每一个权重的偏导 交叉熵常来用于衡量两个概率之间的区别 交叉熵损失函数的梯度是真实概率和预测概率的区别 softmax激活函数常用于多分类问题。经过softmax函数后得到的输出为一组概率,概率非负且相加和为1 需要看的论文:ResNet,U-Net 训练优化方法: 初始化:恺明初始化方法 学习率: 动量:逃出局部最小值,可直观理解为惯性","s":"理论基础","u":"/blog/理论知识","h":"","p":8},{"i":11,"t":"矩阵 / 向量的内积和外积​ 点乘:内积又称标量积,运算结果为标量,是将两个矩阵或向量的对应元素做乘法 叉乘:外积又称向量积,运算结果为向量,遵循行列式乘法规则","s":"基础数学知识","u":"/blog/数学基础","h":"","p":10},{"i":13,"t":"点乘:内积又称标量积,运算结果为标量,是将两个矩阵或向量的对应元素做乘法 叉乘:外积又称向量积,运算结果为向量,遵循行列式乘法规则","s":"矩阵 / 向量的内积和外积","u":"/blog/数学基础","h":"#矩阵--向量的内积和外积","p":10},{"i":15,"t":"一、激活函数​ Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 
致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"激活函数与Loss的梯度","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"","p":14},{"i":17,"t":"Sigmoid函数 / Logistic函数 σ(x)=11+e−x(1)\\sigma(x) = \\frac{1}{1 + e^{-x}} \\tag{1}σ(x)=1+e−x1​(1) dσdx=σ(1−σ)(2)\\frac{{\\rm d}\\sigma}{{\\rm d}x} = \\sigma{(1 - \\sigma)} \\tag{2}dxdσ​=σ(1−σ)(2) 优点:可以将数据压缩至[0, 1)区间内,有较大实用意义 致命问题:在输入值较小或较大时,Sigmoid函数的梯度趋近于零,会导致网络参数长时间得不到更新,即梯度弥散问题 from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.sigmoid(x) # 当x为100时,sigmoid(x)就接近于0了 线性整流单元(Rectified Linear Unit, ReLU) f(x)={0x<0xx≥0(1)f(x) = \\begin{cases} 0 & x < 0\\\\ x & x \\geq 0\\\\ \\end{cases} \\tag{1}f(x)={0x​x<0x≥0​(1) df(x)dx={0x<01x≥0(2)\\frac {{\\text d}f(x)}{{\\text d}x} = \\begin{cases} 0 & x < 0\\\\ 1 & x \\geq 0\\\\ \\end{cases} \\tag{2}dxdf(x)​={01​x<0x≥0​(2) from torch.nn import functional as F import torch x = torch.linspace(-100, 100, 10) F.relu(x) Softmax函数 常用于多分类任务,网络的输出经过Softmax函数后,成为和为1的概率 S(yi)=eyi∑jneyj(1)S(y_i) = \\frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j}}} \\tag{1}S(yi​)=∑jn​eyjeyi​​(1)","s":"一、激活函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#一激活函数","p":14},{"i":19,"t":"Mean Squared Error 均方误差 L2范数是对元素求平方和后再开根号,需要.pow(2)后才可作为损失函数 微小的误差可能对网络性能带来极大的影响 LossMSE=∑[y−f(x)]2(1)Loss_{MSE} = \\sum{[{y - f(x)]^2}} \\tag{1}LossMSE​=∑[y−f(x)]2(1) ∥y−f(x)∥2=∑[y−f(x)]22(2)\\Vert y - f(x) \\Vert_2 = \\sqrt[2]{\\sum{[y - f(x)]^2}} \\tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2) Cross Entropy Loss 交叉熵损失 binary 二分类问题 multi-class 多分类问题 经常与softmax激活函数搭配使用","s":"二、损失函数","u":"/blog/deep_learning/激活函数与Loss的梯度","h":"#二损失函数","p":14},{"i":21,"t":"一、常用函数部分​ concat与stack函数 stack函数对输入的两个张量在指定的维度进行堆叠,是==创建了新的维度== concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的==最外层==对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = 
torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"PyTorch基础","u":"/blog/PyTroch基础","h":"","p":20},{"i":23,"t":"concat与stack函数 stack函数对输入的两个张量在指定的维度进行堆叠,是==创建了新的维度== concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的==最外层==对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"一、常用函数部分","u":"/blog/PyTroch基础","h":"#一常用函数部分","p":20},{"i":25,"t":"在上一篇博客中我们完成了GeekOS环境的配置,下面我们来验证环境配置的成功与否以及project 0的实现。","s":"GeekOS project 0的实现","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"","p":24},{"i":27,"t":"编写geekos-version/src/projecti/src/geekos/main.c文件 编写函数project0实现检测键盘输入Ctrl+d结束线程。 void project0(){ Print(\"To Exit hit Ctrl + d.\\n\"); Keycode keycode; while(1) { if(Read_Key(&keycode)) { if(!((keycode & KEY_SPECIAL_FLAG) || (keycode & KEY_RELEASE_FLAG)))// 不是特殊键或者弹起 { int asciiCode = keycode & 0xff;//d if((keycode & KEY_CTRL_FLAG)==KEY_CTRL_FLAG && asciiCode=='d')//ctrl+d { Print(\"\\n---------Adios!---------\\n\"); # 这里需要注意素质 Exit(1); }else { Print(\"%c\",(asciiCode=='\\r') ? '\\n' : asciiCode); } } } } } 在main函数中添加以下代码,实现自定义函数的调用,创建线程。 struct Kernel_Thread *thread; thread = Start_Kernel_Thread(&project0,0,PRIORITY_NORMAL,false); 总体代码 /* * GeekOS C code entry point * Copyright (c) 2001,2003,2004 David H. Hovemeyer * Copyright (c) 2003, Jeffrey K. Hollingsworth * Copyright (c) 2004, Iulian Neamtiu * $Revision: 1.51 $ * * This is free software. You are permitted to use, * redistribute, and modify it as specified in the file \"COPYING\". */ #include #include #include #include #include #include #include #include #include #include #include void project0(){ Print(\"To Exit hit Ctrl + d.\\n\"); Keycode keycode; while(1) { if(Read_Key(&keycode)) { if(!((keycode & KEY_SPECIAL_FLAG) || (keycode & KEY_RELEASE_FLAG)))// 不是特殊键或者弹起 { int asciiCode = keycode & 0xff;//d if((keycode & KEY_CTRL_FLAG)==KEY_CTRL_FLAG && asciiCode=='d')//ctrl+d { Print(\"\\n---------Adios! Motherfucker!---------\\n\"); Exit(1); }else { Print(\"%c\",(asciiCode=='\\r') ? '\\n' : asciiCode); } } } } } /* * Kernel C code entry point. * Initializes kernel subsystems, mounts filesystems, * and spawns init process. 
*/ void Main(struct Boot_Info* bootInfo) { Init_BSS(); Init_Screen(); Init_Mem(bootInfo); Init_CRC32(); Init_TSS(); Init_Interrupts(); Init_Scheduler(); Init_Traps(); Init_Timer(); Init_Keyboard(); Set_Current_Attr(ATTRIB(BLACK, GREEN|BRIGHT)); Print(\"Welcome to GeekOS!\\n\"); Set_Current_Attr(ATTRIB(BLACK, GRAY)); // TODO(\"Start a kernel thread to echo pressed keys and print counts\"); struct Kernel_Thread *thread; thread = Start_Kernel_Thread(&project0,0,PRIORITY_NORMAL,false); /* Now this thread is done. */ Exit(0); }","s":"1. 编写C语言代码","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#1-编写c语言代码","p":24},{"i":29,"t":"每一个项目的编译都在geekos-version/src/projecti/build文件夹下进行,即要在终端中通过cd进入该目录。 执行 make depend make 此时,该目录下会生成bochs.out、depend.mak以及fd.img文件,bochs.out文件是日志输出文件,depend.mak是编译中间生成的文件,最终生成的fd.img是最重要的GeekOS映像文件,有了它才能使用bochs运行GeekOS操作系统。感恩它! 目录下的文件应该是这样的结构: 下面就可以使用bochs运行GeekOS系统了,可以说bochs的运行依赖两个文件,一个是配置文件.bochsrc,一个是映像文件fd.img,映像文件的加载路径需要在.bochsrc文件中定义,在环境配置的博客中已经介绍过了。这里再贴一下内容。 # An example .bochsrc file. # You will need to edit these lines to reflect your system. vgaromimage: file=/usr/local/share/bochs/VGABIOS-lgpl-latest # 请根据自己的实际安装路径更改 romimage: file=/usr/local/share/bochs/BIOS-bochs-latest # 请根据自己的实际安装路径更改 megs: 8 boot: a floppya: 1_44=fd.img, status=inserted #floppya: 1_44=fd_aug.img, status=inserted log: ./bochs.out # keyboard_serial_delay: 200 # vga_update_interval: 300000 mouse: enabled=0 private_colormap: enabled=0 # i440fxsupport: enabled=0 # Uncomment this to write all bochs debugging messages to # bochs.out. This produces a lot of output, but can be very # useful for debugging the kernel. #debug: action=report 在这个目录下打开终端,执行 bochs 选择6,按下回车 可能会出现黑屏情况,这是因为进入了调试模式,终端正在等待命令,在终端输入 c 即可完成bochs的正式启动,最终的效果","s":"2. 使用Linux的编译系统对C语言代码进行编译","u":"/docs/课程学习/操作系统课设/GeekOS project 0","h":"#2-使用linux的编译系统对c语言代码进行编译","p":24},{"i":33,"t":"词法分析:分析输入串如何构成句子,得到单词序列 语法分析:分析单词序列如何构成程序,构造语法分析树 语义分析:审查语义错误,为代码生成收集类型信息 中间代码生成 代码优化 目标代码生成 表管理、错误检查和处理贯穿整个过程","s":"1.1 编译程序的逻辑结构","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#11-编译程序的逻辑结构","p":30},{"i":35,"t":"前端是指与源语言有关、与目标机无关的部分 如词法分析、语法分析、语义分析、中间代码生成、代码优化中与机器无关的部分 后端是指与目标机有关的部分 如代码优化中与机器有关的部分、目标代码的生成","s":"1.2 前端和后端","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#12-前端和后端","p":30},{"i":37,"t":"遍是指从头到尾扫描一遍源程序","s":"1.3 遍的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#13-遍的概念","p":30},{"i":40,"t":"若从文法的开始符号开始存在以下推导,则称α\\alphaα为该文法的一个句型,句型中既可以包含终结符,也可以包含非终结符,也可以是空串 S⇒∗α, α∈(VT∪VN)∗(1)S \\Rightarrow^* \\alpha,\\space \\alpha \\in (V_T \\cup V_N)^* \\tag{1}S⇒∗α, α∈(VT​∪VN​)∗(1)","s":"2.1 句型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#21-句型","p":30},{"i":42,"t":"S⇒∗β, β∈VT∗(2)S \\Rightarrow^* \\beta,\\space \\beta \\in V_T^* \\tag{2}S⇒∗β, β∈VT∗​(2) 则称β\\betaβ是该文法的句子","s":"2.2 句子:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#22-句子","p":30},{"i":44,"t":"0型文法,又称无限制文法、短语文法 1型文法,又称文有关文法 2型文法,又称上下文无关文法(Context-Free Grammar,CFG) 可用来构建语法树,语法树是上下文无关文法推导和规约的图形化表示 A→β, A∈VN, β∈(VT∪VN)∗(3)\\Alpha \\rightarrow \\beta,\\space \\Alpha \\in V_N, \\space \\beta \\in (V_T \\cup V_N)^* \\tag{3}A→β, A∈VN​, β∈(VT​∪VN​)∗(3) 3型文法,又称正规文法(Regular Grammar,RG) 左线性文法 右线性文法","s":"2.3 文法的分类:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#23-文法的分类","p":30},{"i":46,"t":"如果在推导的任何一步都是对产生式左部中的最左/右非终结符进行替换,则称为最左/右推导,其中最右推导也被成为规范推导","s":"2.4 最左/右推导:","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#24-最左右推导","p":30},{"i":50,"t":"确定的有穷自动机(DFA) DFA的定义及组成 确定的含义:在状态转换的每一步,FA根据当前的状态及扫描的输入字符,便能唯一地知道FA的下一状态。 提示 在状态转换图中的直观体现就是,在确定行表示的当前状态以及列确定的路径后,得到的目的状态不会是元素个数大于1的集合。 DFA的可接受以及接受集的定义:从开始状态开始,经过该符号串表示的路径,若能到达终态则称该符号串可被改DFA接受。 不确定的有穷自动机(NFA) 
NFA的确定化,即将NFA转换为DFA(子集法) 步骤: 画出DFA转换表 提示 转换表中在状态一列中,状态包含原NFA终态的集合要标*,代表其为等价DFA的终态 计算move(T,a)move(T, a)move(T,a) 计算ϵ−closure(T)\\epsilon -closure(T)ϵ−closure(T) 为转换表中的状态重命名 确定初态和终态 DFA的最小化(分割法) 步骤如下: 提示 考试时注意过程怎么写,下面使用需要三轮分割的列子演示步骤 在分割完成后,对可以化简的集合选出一个状态作为代表,删除其他多余状态,重新画图","s":"3.2 有穷自动机(FA)","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#32-有穷自动机fa","p":30},{"i":54,"t":"描述程序语法结构的规则可以使用2型文法(上下文无关语法,CFG) 语法分析方法包含确定的和不确定的分析方法,确定的语法分析方法根据输入符号,唯一选择产生式 确定的自顶向下分析方法:根据当前的输入符号唯一地确定选用哪个产生式替换相应的非终结符以往下推导","s":"第四章:自顶向下语法分析方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#第四章自顶向下语法分析方法","p":30},{"i":57,"t":"提示 FOLLOW集的求法可以按照下图技巧进行 若要求的非终结符是开始符号,则直接将#插入FOLLOW集中 在所有产生式的右部中找到要求的非终结符 看非终结符的右侧是什么元素 若无元素,则直接将该产生式左部的FOLLOW集加入到该非终结符的FOLLOW集中 若为终结符,直接将该终结符加入到FOLLOW集中 若为非终结符,将FIRST(该非终结符)减去ϵ\\epsilonϵ的所有终结符元素都加入至FOLLOW集中","s":"2. Follow集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-follow集的定义","p":30},{"i":59,"t":"提示 需要注意的是FIRST集、FOLLOW集是针对于符号串而言的,而SELECT集是针对于产生式而言的","s":"3. SELECT集的定义","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-select集的定义","p":30},{"i":62,"t":"提示 考试时注意书写过程,需要画出以下两张表","s":"5. LL(1)文法的判别","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-ll1文法的判别","p":30},{"i":64,"t":"预测分析表通过计算SELECT集得到,形如下表 行标为各非终结符,列标为输入符号,若从某一非终结符开始的产生式的SELECT集包含某一输入符号,则对应产生式就是行列确定的元素值。","s":"6. 预测分析表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#6-预测分析表","p":30},{"i":66,"t":"消除左公因子(回溯) 警告 同一非终结符的多个产生式存在共同前缀,会导致回溯现象,需要消除 消除左递归 警告 左递归文法会使递归下降分析器陷入无限循环 消除直接左递归 消除间接左递归 通过代入法变成直接左递归再消除","s":"7. 非LL(1)文法到LL(1)文法的等价变换","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#7-非ll1文法到ll1文法的等价变换","p":30},{"i":69,"t":"从的底部向顶部的方向构造语法分析树,采用最左归约的方式,即最右推导的逆过程 提示 注意辨别:自顶向下的语法分析采用最左推导的方式 最右推导是规范推导,最左归约是最右推导的逆过程,又称规范归约","s":"5.1 概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#51-概念","p":30},{"i":71,"t":"算符优先分析法 按照算符的优先关系和结合性质进行语法分析 LR分析法(重点) 规范规约:句柄作为可归约串","s":"5.2 方法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#52-方法","p":30},{"i":74,"t":"移入:将下一个输入符号移到栈顶 归约:被归约的符号串的右端处于栈顶,语法分析器在栈中确定这个串的左端非终结符来替换该串 接受:宣布语法分析过程成功完成 报错:发现一个语法错误,并调用错误恢复子程序","s":"5.4 移入-归约分析器的4种动作","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#54-移入-归约分析器的4种动作","p":30},{"i":76,"t":"前导知识:4种项目状态 归约项目:·在最后 接受项目:拓广文法的开始符号的产生式,且·在最后 移进项目:·后面是终结符VTV_TVT​ 待约项目:·后面是非终结符VNV_NVN​ 移入-归约分析 LR(0)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV1pL4y1E7RE/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在写预测分析表的reduce项时,action的每一列都要写 SLR(1)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV12u411S7Us/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在写预测分析表的reduce项时,只写产生式左部的FOLLOW集对应的action列 LR(1)分析表 / 构造其识别活前缀DFA https://www.bilibili.com/video/BV1Vm4y1Q7XB/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533 在构造项目集时,要加入前向搜索符;并且,在写预测分析表的reduce项时只写前向搜索符对应的action列 LALR(1)分析表 / 构造其识别活前缀DFA 在构造项目集时,要加入前向搜索符,但是要合并同心集,把相同表达式但是不同前向搜索符的前向搜索符合并,并且在写预测分析表的reduce项时只写前向搜索符集对应的action列 https://www.bilibili.com/video/BV13r4y1m7sQ/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533","s":"5.5 重要题型","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#55-重要题型","p":30},{"i":79,"t":"词法分析:从左到右扫描源程序,识别出各个单词,确定单词类型并形成单词序列,进行词法错误检查,对标识符进行登记,即符号表管理 语法分析:从词法分析输出的单词序列识别出各类短语,构造语法分析树,并进行语法错误检查 语义分析:审查程序是否具有语义错误,为代码生成阶段收集类型信息,不符合规范时报错(符号表是语义正确性检查的依据) 中间代码生成:生成中间代码,如三地址指令、四元式、波兰式、逆波兰式、树形结构等 代码优化:对代码进行等价变换以求提高执行效率,提高速度或节省空间 目标代码生成:将中间代码转化成目标机上的机器指令代码或汇编代码(符号表是对符号分配地址的依据)","s":"1 编译程序各阶段功能","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#1-编译程序各阶段功能","p":30},{"i":81,"t":"就产生语法树的方向而言,可大致分为自顶向下的语法分析和自底向上的语法分析两大类。 自顶向下的语法分析方法:主流方法为递归下降分析法。根据当前的输入符号唯一地确定选用哪个产生式替换相应的非终结符以往下推导。 自底向上的语法分析方法:将输入串w归约为文法开始符号S的过程。 提示 LR(0), SLR(1), LR(1) LR(0)文法可能存在移进-归约冲突、归约-归约冲突 
SLR(1)文法在构造的过程中不存在归约-归约冲突,但有可能出现移进-归约冲突,可以由FOLLOW集解决的话则是SLR(1)文法","s":"2 语法分析方法的概念","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#2-语法分析方法的概念","p":30},{"i":83,"t":"翻译模式是适合语法制导语义计算的另一种描述形式,可以体现一种合理调用语义动作的算法。 S-翻译模式: 仅涉及综合属性的翻译模式,通常将语义动作集合置于产生式右端末尾。 L-翻译模式: 既可以包含综合属性,也可以包含继承属性。","s":"3 翻译模式","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#3-翻译模式","p":30},{"i":85,"t":"在文法基础上,为文法符号关联有特定意义的属性,并为产生式关联相应的语义动作,称之为属性文法。 S-属性文法: 只包含综合属性的属性文法成为S-属性文法 L-属性文法: 可以包含综合属性,也可以包含继承属性,但要求产生式右部的文法符号的继承属性的计算只取决于该符号左边符号的属性","s":"4 属性文法","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#4-属性文法","p":30},{"i":87,"t":"符号表是编译程序中用于收集标识符的属性信息的数据结构。 各阶段作用: 语义分析阶段:语义合法性检查的依据 目标代码生成阶段:对符号名进行地址分配的依据","s":"5 符号表","u":"/docs/课程学习/编译原理/编译原理复习笔记","h":"#5-符号表","p":30},{"i":91,"t":"GeekOS是一个基于x86体系结构的微操作系统内核. 由美国马理兰大学的教师开发, 主要用于操作系统课程设计的教育. 出于教学目的, 这个系统内核设计简单, 却又兼备实用性, 它可以运行在真正的X86 PC硬件平台. 在下载好GeekOS后, 在geekos-version/src/目录下会存在project0-project6这7个文件夹, 分别代表GeekOS设计的7个学习任务. 在环境搭建完成之后, 我们进行的每一个项目的代码编写几乎都在geekos-version/src/projecti/src/geekos/文件夹下, 每一个项目的编译都在geekos-version/src/projecti/build文件夹下进行, 即要在终端中通过cd进入该目录, 再执行make depend和make命令.","s":"1. GeekOS:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-geekos","p":88},{"i":93,"t":"bochs是一个x86硬件平台的模拟器. GeekOS运行依托于bochs. 在安装好Linux操作系统后需要安装bochs以及nasm, 以完成GeekOS环境的搭建.","s":"2. bochs:","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-bochs","p":88},{"i":95,"t":"GeekOS的开发环境可分为两部分, 一部分是编译环境, 一部分是运行环境. 在编译过程中, 使用Linux自带的编译环境以及编译命令对特定的GeekOS project进行编译即可. 首先在终端中通过cd命令进入geekos-version/src/projecti/build目录, 再执行make depend和make命令. 编译后生成bochs的镜像文件fd.img, 这是bochs运行所必须的文件,也是GeekOS运行环境的前置配置.","s":"3. 二者之间的关系","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#3-二者之间的关系","p":88},{"i":97,"t":"安装其实非常简单, 这里主要花篇幅介绍安装后解决报错的配置.","s":"二、安装与配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#二安装与配置","p":88},{"i":99,"t":"需要下载GeekOS Files, 安装bochs, nasm等. GeekOS直接下载压缩包, 解压即可. arch系用户通过以下命令即可完成bochs和nasm的安装. yay -S bochs nasm 其他发行版的安装方法这里不再赘述, 可选择从群文件里下载源文件并编译安装, 师兄师姐也在群文件里给了一些教程指导.","s":"1. 安装","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#1-安装","p":88},{"i":101,"t":"完成安装后, 我们就可以开始对project0中的代码进行完善了, 并在geekos-version/src/project0/build目录下执行make depend以及make命令, 目的是编译project0的代码, 生成bochs的镜像文件fd.img以构建GeekOS的运行环境. 但很多报错就是在make这一步产生的, 因此在安装完成后还需要进行配置. 配置分为两部分, 一个是对GeekOS中makefile的修改, 另一部分是对bochs的配置文件的修改. GeekOS中makefile的配置​ 综合网上很多师兄师姐的博客,这三个错误应该是每个人都会遇到的,所以当你不确定自己能不能运行时,请全部完成这三个步骤. 问题: warnings being treated as errors 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件(由于每个project下都存在一个对应的makefile文件, 所以在每个项目编译前都要修改一次) // 修改第149行: CC_GENERAL_OPTS := $(GENERAL_OPTS) -Werror // 修改后: CC_GENERAL_OPTS := $(GENERAL_OPTS) 问题: X86_64与i386输出不兼容 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件 # Target C compiler. gcc 2.95.2 or later should work. 100行 TARGET_CC := $(TARGET_CC_PREFIX)gcc -m32 # Host C compiler. This is used to compile programs to execute on # the host platform, not the target (x86) platform. On x86/ELF # systems, such as Linux and FreeBSD, it can generally be the same # as the target C compiler. 106行 HOST_CC := gcc -m32 # Target linker. GNU ld is probably to only one that will work.109行 TARGET_LD := $(TARGET_CC_PREFIX)ld -m elf_i386 问题: undefined reference to '__stack_chk_fail' 解决方案: 修改geekos-version/src/projecti/build目录下的makefie文件 # Flags used for all C source files // 修改前:148行 GENERAL_OPTS := -O -Wall $(EXTRA_C_OPTS) // 修改后: GENERAL_OPTS := -O -Wall -fno-stack-protector $(EXTRA_C_OPTS) bochs配置文件的修改​ 在geekos-version/src/projecti/build目录下创建.bochsrc文件 # An example .bochsrc file. 
# You will need to edit these lines to reflect your system. vgaromimage: file=/usr/local/share/bochs/VGABIOS-lgpl-latest # 请根据自己的实际安装路径更改 romimage: file=/usr/local/share/bochs/BIOS-bochs-latest # 请根据自己的实际安装路径更改 megs: 8 boot: a floppya: 1_44=fd.img, status=inserted #floppya: 1_44=fd_aug.img, status=inserted log: ./bochs.out # keyboard_serial_delay: 200 # vga_update_interval: 300000 mouse: enabled=0 private_colormap: enabled=0 # i440fxsupport: enabled=0 # Uncomment this to write all bochs debugging messages to # bochs.out. This produces a lot of output, but can be very # useful for debugging the kernel. #debug: action=report 到此为止, 所有的配置工作已经完成, 可以正常的进行下一步的代码完善. 如果需要验证自己是否配置成功, 可以参照下一篇博客GeekOS project 0的实现, 在本篇博客中会有完整的C语言代码编写以及编译、使用bochs执行的过程.","s":"2. 配置","u":"/docs/课程学习/操作系统课设/Linux系统下GeekOS的环境配置","h":"#2-配置","p":88},{"i":107,"t":"MIPS=一个周期可执行的指令条数/(周期*10^6) CPI代表一条指令需要执行几个周期,则一个周期可执行的指令条数等于CPI的倒数 故MIPS=频率/(CPI*10^6)","s":"一、基础知识","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#一基础知识","p":105},{"i":110,"t":"一般情况下,数据在Cache以及主存中是以字为单位进行编码的 Cache与主存是以字块为单位进行数据交换的 Cache透明性分析:从主存中读的时候一定调入Cache,写的时候不一定: 按写分配:向主存中写入的时候同时调入Cache 不按写分配:向主存中写入的时候不调入Cache 在解答Cache与主存采用组相联、LRU替换算法进行访问主存序列流的类型题时,注意组相联,要将Cache分为组号和块号,分开进行更新: 先对主存地址求余,余数即为其在Cache中的组号 在组内是全相联映像,使用LRU替换算法进行替换操作 Cache预取算法命中率的计算 H′=H+n−1n(1)H'=\\frac{H+n-1}{n} \\tag{1}H′=nH+n−1​(1) 其中,nnn为Cache块大小与数据块重复使用次数的乘积,HHH为原来的命中率","s":"三、存储系统","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#三存储系统","p":105},{"i":112,"t":"当采用预留算法进行最优调度时,设最小平均间隔周期为xxx,则在第一个任务完成后,每隔xxx时钟周期流出一个任务 最小间隔周期的确定: 预约表中最多√数一行中的√数 通过次数最多的功能段的通过次数 此时,该功能段也就是瓶颈段","s":"四、流水线","u":"/docs/课程学习/计算机体系结构/体系结构复习笔记","h":"#四流水线","p":105},{"i":114,"t":"提示 欢迎来到笔记本的课程学习部分","s":"Welcome","u":"/docs/课程学习/intro","h":"","p":113},{"i":116,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/课程学习/intro","h":"#支持我","p":113},{"i":118,"t":"提示 Grateful for all the conveniences provided by Docusaurus! Grateful for Sonder's treasure trove of notebooks! 
Grateful for the blue sky and also the white clouds!","s":"鸣谢","u":"/docs/鸣谢/intro","h":"","p":117},{"i":120,"t":"提示 大数除法是指被除数大小超出long long范围,而导致必须使用字符串存储的除法,属于简单模拟的范畴","s":"大数除法","u":"/docs/推免/机试/大数除法","h":"","p":119},{"i":122,"t":"通过模拟列竖式手动计算除法,实现使用字符串存储被除数的大数除法","s":"思路","u":"/docs/推免/机试/大数除法","h":"#思路","p":119},{"i":124,"t":"string division(string s, int divisor) { /* * 通过模拟列竖式手算除法完成字符串存储的大数除法 */ string quotient; // 商 int idx = 0; // 当前处理的数字在原始字符串中的位置 int remainder = 0; // 余数 int temp = 0; while (idx < s.size()) { // 一直循环处理到索引等于长度 temp = remainder * 10 + (s[idx] - '0'); // 当前进行除法运算的temp if (temp >= divisor) { // 如果能除的动,则将当前的商插入quotient,并更新余数 quotient.push_back(temp / divisor + '0'); remainder = temp % divisor; } else { // 除不动时分两种情况 if (!quotient.empty()) { // 商目前不为空,此时按照竖式方法,需要向商中加入0,再接着下一次循环 quotient.push_back('0'); } remainder = temp; // 商目前为空,按照竖式计算方法,只更新余数,商保持为空 } idx++; // 更新索引位置 } if (quotient.empty()) { // 如果一直除不动,循环结束商还为空,则赋值为0字符串 quotient.assign(\"0\"); } return quotient; // 返回商字符串 }","s":"参考代码","u":"/docs/推免/机试/大数除法","h":"#参考代码","p":119},{"i":126,"t":"将大数除法与进制转换相结合。 提示 北京大学机试真题,N诺链接 完整代码如下: #include using namespace std; string division(string s, int divisor) { /* * 通过模拟列竖式手算除法完成字符串存储的大数除法 */ string quotient; // 商 int idx = 0; // 当前处理的数字在原始字符串中的位置 int remainder = 0; // 余数 int temp = 0; while (idx < s.size()) { // 一直循环处理到索引等于长度 temp = remainder * 10 + (s[idx] - '0'); // 当前进行除法运算的temp if (temp >= divisor) { // 如果能除的动,则将当前的商插入quotient,并更新余数 quotient.push_back(temp / divisor + '0'); remainder = temp % divisor; } else { // 除不动时分两种情况 if (!quotient.empty()) { // 商目前不为空,此时按照竖式方法,需要向商中加入0,再接着下一次循环 quotient.push_back('0'); } remainder = temp; // 商目前为空,按照竖式计算方法,只更新余数,商保持为空 } idx++; // 更新索引位置 } if (quotient.empty()) { // 如果一直除不动,循环结束商还为空,则赋值为0字符串 quotient.assign(\"0\"); } return quotient; // 返回商字符串 } int main() { string s; while (cin >> s) { vector vec; int len = s.size(); while (s != \"0\") { int remainder = (s[len - 1] - '0') % 2; vec.push_back(remainder); s = division(s, 2); len = s.size(); } if (vec.empty()) { cout << \"0\"; } else { for (auto it = vec.rbegin(); it != vec.rend(); it++) { cout << *it; } } cout << endl; } return 0; }","s":"扩展","u":"/docs/推免/机试/大数除法","h":"#扩展","p":119},{"i":130,"t":"显著性目标检测Salient Object Detection,相当于语义分割中的二分类任务,只有前景和背景","s":"(一)SOD任务","u":"/docs/推免/简历/简历面试准备","h":"#一sod任务","p":127},{"i":132,"t":"下图为U-2-Net的整体结构 提示 residual [rɪˈzɪdjuəl] 在encoder阶段,每个block之后使用maxpooling下采样两倍 在decoder阶段,每个block之后使用双线性插值上采样两倍 下图为Residual U-block的结构 提示 卷积是如何改变输出的通道数的? 
卷积核的通道数等于输入的通道数,卷积核的个数等于输出的通道数 图片来源知乎 在特征融合阶段,每一层的encoder-decoder输出,使用3x3卷积以及双线性插值上采样到原始分辨率得到该层的特征图,且卷积核的个数为1,输出的feature map通道数也为1。将每一层的feature map进行concat拼接,得到6通道的融合feature map,最后使用1x1卷积以及sigmoid激活函数得到最终的融合特征图输出","s":"(二)网络结构","u":"/docs/推免/简历/简历面试准备","h":"#二网络结构","p":127},{"i":134,"t":"损失函数是7个损失项的加权求和 共有6层encoder-decoder结构,将每一层对应的feature map与ground truth做BCE Loss得到6个损失项 第7个损失项是最终融合得到的feature map与ground truth的BCE Loss 在论文中,每个损失项的权重都为1 canny边缘检测: 使用高斯滤波进行平滑 计算像素梯度 非极大值抑制 双阈值检测强边缘、弱边缘 边缘连接","s":"(三)损失函数","u":"/docs/推免/简历/简历面试准备","h":"#三损失函数","p":127},{"i":136,"t":"深度可分离卷积的优点是可以在大致保持卷积效果的情况下减少参数量 在实现原理上可分为两个步骤:深度卷积(depth wise)以及逐点(point wise)卷积 深度卷积是一种在每个输入通道上分别进行卷积操作的卷积方法,每个输入通道只与对应的卷积核进行卷积。 逐点卷积通过使用1×11 \\times 11×1卷积对深度卷积的结果再次卷积","s":"(四)深度可分离卷积","u":"/docs/推免/简历/简历面试准备","h":"#四深度可分离卷积","p":127},{"i":139,"t":"PR曲线所围成的面积即使该类的AP值","s":"(一)mAP","u":"/docs/推免/简历/简历面试准备","h":"#一map","p":127},{"i":141,"t":"提示 参考资料:【精读AI论文】YOLO V1目标检测,看我就够了 1.预测阶段​ 下图为YOLOv1的算法框架 下图为YOLOv1的网络结构 输入[448, 448, 3]图像,输出[7, 7, 30]的tensor(包含所有预测框的坐标、置信度和类别结果),通过解析输出的tensor得到预测结果 首先将输入图片划分为S×SS \\times SS×S个grid cell。在YOLOv1中S=7S=7S=7 每个grid cell预测出BBB个bounding box预测框(bbox),每个bbox的中心点都落在该grid cell中。在YOLOv1中B=2B=2B=2 每个bbox包含(x, y, h, w, c)五种信息,其中x, y为bbox左上角坐标,h, w为bbox的宽高,c为该bbox是否存在object的概率 同时每个grid cell预测出一组与数据集有关的条件类别概率。在YOLOv1论文使用的数据集Pascal VOC中,类别种类为20类,因此在预测阶段输出的[7, 7, 30]的tensor含义如下图所示 每个grid cell选出条件类别概率最大的类别,因此每个grid cell只能检测一个物体 提示 这也是YOLOv1小目标和密集目标识别能力差的原因 每个bbox的置信度与其父grid cell的类别概率相乘得到全概率,如下图所示 进行NMS后处理: 对某一特定类别,首先根据全概率置信度排序 将此时最大置信度的bbox与其他所有置信度更小的bbox做IoU判断,若IoU大于设置的阈值,则抹除置信度小的bbox 将剩余的次大的置信度重复步骤2,抹除所有置信度更小的其IoU超过阈值的bbox 提示 非极大值抑制只在预测阶段进行 在训练阶段,所有bbox都会在Loss Function中起到更新的作用,因此不进行NMS 2. 训练过程的损失函数​","s":"(二)YOLOv1","u":"/docs/推免/简历/简历面试准备","h":"#二yolov1","p":127},{"i":143,"t":"1. BN层​ 2. 高分辨率训练​ 3. Anchor​ YOLOv2引入了anchor机制代替bbox,将图像划分为13×1313 \\times 1313×13个grid cell,每个grid cell生成5个anchor anchor是通过k-means聚类在数据集上生成的不同尺寸的先验框 对数据集进行anchor宽高比的聚类,聚类数越大,覆盖的IoU越大,但同时模型也更复杂","s":"(二)YOLOv2","u":"/docs/推免/简历/简历面试准备","h":"#二yolov2","p":127},{"i":145,"t":"1. 特征融合​ YOLOv5使用CSPNet实现特征融合,CSP模块由主干和分支构成,主干提取低维特征,分支提取高维特征 主干通过卷积和池化提取特征,形成不同尺寸的特征图 分支将主干输出的特征图作为输入,逐步卷积和上采样提取高级别语义特征 主干特征图通过卷积对通道数降维之后与分支在通道维度上concat 提示 在特征提取以及融合阶段可以加入Canny边缘检测得到的特征图进行特征融合 2. 前处理​ 对填充黑色像素进行了改善,以填充更少的黑像素,提高了精度 3. 
特征金字塔FCN​","s":"(三)YOLOv5","u":"/docs/推免/简历/简历面试准备","h":"#三yolov5","p":127},{"i":147,"t":"::: 有关CSP特征融合可以参考:https://blog.csdn.net/weixin_55073640/article/details/122614176 ::: CBAM是通道+空间注意力机制(SENet是通道注意力机制)","s":"三、CBAM","u":"/docs/推免/简历/简历面试准备","h":"#三cbam","p":127},{"i":149,"t":"通道注意力:原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]经过通道注意力机制算法得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,代表不同通道之间的重要程度,将其与原始特征图相乘 空间注意力:经过通道注意力的特征图[b,c,h,w][b, c, h, w][b,c,h,w]经过空间注意力机制算法得到[b,1,h,w][b, 1, h, w][b,1,h,w]的tensor,代表宽高维度的像素之间的重要程度,将其与原始特征图相乘","s":"(一)总体结构","u":"/docs/推免/简历/简历面试准备","h":"#一总体结构","p":127},{"i":151,"t":"原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]分别经过最大池化和平均池化来压缩空间维度、学习通道之间的特征,得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,再送入共享的多层感知机网络进行降维再升维,最后将二者相加再经过sigmoid函数产生最终的通道注意力特征图","s":"(二)通道注意力","u":"/docs/推免/简历/简历面试准备","h":"#二通道注意力","p":127},{"i":153,"t":"原始特征图[b,c,h,w][b, c, h, w][b,c,h,w]分别经过最大池化和平均池化(通过torch.max和torch.mean函数实现)得到[b,1,h,w][b, 1, h, w][b,1,h,w]的tensor,再将二者concat后通过7×77 \\times 77×7卷积学习特征并降维,最后送入sigmoid函数得到最终的空间注意力特征图","s":"(三)空间注意力","u":"/docs/推免/简历/简历面试准备","h":"#三空间注意力","p":127},{"i":155,"t":"作者分别对通道注意力以及空间注意力使用最大池化还是平均池化做了消融实验,结果反映二者都用最大池化以及平均池化再相加效果最好(且对于7×77 \\times 77×7卷积与3×33 \\times 33×3卷积的消融实验发现,7×77 \\times 77×7卷积效果更好) 作者对先通道注意力还是先空间注意力做了消融实验,结果发现先通道再空间效果更好","s":"(四)其他注意事项","u":"/docs/推免/简历/简历面试准备","h":"#四其他注意事项","p":127},{"i":157,"t":"Focal Loss通过引入修正项和样本关注度超参数,增加困难样本的关注度,来解决类别不均衡问题。 YOLO损失函数分为分类损失以及回归损失,可以在分类损失中引入Focal Loss代替原来的交叉熵损失","s":"四、Focal Loss","u":"/docs/推免/简历/简历面试准备","h":"#四focal-loss","p":127},{"i":159,"t":"Squeeze and Excitation Squeeze挤压操作就是将[b,c,h,w][b, c, h, w][b,c,h,w]的特征图通过池化挤压宽高维度,得到[b,c,1,1][b, c, 1, 1][b,c,1,1]的tensor,该tensor还要经过所示的全连接层-ReLU-全连接层结构 Excitation激励操作就是通过sigmoid函数得到每个通道之间的重要程度系数","s":"五、SENet","u":"/docs/推免/简历/简历面试准备","h":"#五senet","p":127},{"i":161,"t":"自注意力机制通过计算元素之间的相似度来确定它们之间的关联性,并对其进行加权处理以获得上下文信息。 自注意力机制通过对输入的元素进行线性变换来得到查询(Query)向量、键(Key)向量和值(Value)向量。 通过点积和缩放点积计算相似程度 通过自注意力机制,每个元素都可以通过与其他元素的相似度计算和加权求和,获取到与它们相关的上下文信息。相似度高的元素将获得更高的权重,因此更受到关注和影响,从而建立起元素之间的关联性。","s":"六、自注意力机制","u":"/docs/推免/简历/简历面试准备","h":"#六自注意力机制","p":127},{"i":164,"t":"This content has been encrypted.","s":"(一)英文自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#一英文自我介绍","p":127},{"i":166,"t":"1. 英文自我介绍​ This content has been encrypted. 2. 中文自我介绍​ This content has been encrypted.","s":"(二)西电广研院自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#二西电广研院自我介绍","p":127},{"i":168,"t":"1. 英文自我介绍​ This content has been encrypted. 2. 
中文自我介绍​ This content has been encrypted.","s":"(三)电子科技大学自我介绍","u":"/docs/推免/简历/简历面试准备","h":"#三电子科技大学自我介绍","p":127},{"i":171,"t":"树的性质: 一棵 N 个结点的树有 N-1 条边 树的总度数+1=树的结点数 树的度=树中度最大结点的度数 二叉树的性质: 叶子结点数等于度为 2 的结点数加 1,即n0 = n2 + 1 树转化为二叉树: 参考资料:知乎 加线。在所有的兄弟结点之间加一条线。 去线。树中的每个结点,只保留它与第一个孩子结点的连线,删除其他孩子结点之间的连线。 调整。每个结点的原来的孩子是结点的左孩子,由原来的兄弟结点转过来的孩子是结点的右孩子。 二叉排序树:每个结点的左子树上的所有结点值都更小,每个结点的右子树上的所有结点的值都更大。 平衡二叉排序树:要么是空树,要么左子树的高度与右子树的高度之差小于等于1。","s":"树","u":"/docs/推免/计算机基础综合/数据结构","h":"#树","p":169},{"i":173,"t":"图的表示: 邻接矩阵 邻接表:每一行表示的是一个顶点所连接的顶点,链表不具有指向性 邻接表的搜索 最小生成树:在连通网的所有生成树中,所有边的代价和最小的生成树,称为最小生成树。 Kruskal算法 Prim算法 最短路径 ​ ​","s":"图","u":"/docs/推免/计算机基础综合/数据结构","h":"#图","p":169},{"i":175,"t":"提示 参考链接: 线性代数极简入门 《线性代数》高清教学视频 “惊叹号”系列 宋浩老师","s":"线性代数","u":"/docs/推免/数学/线性代数","h":"","p":174},{"i":177,"t":"线性相关与线性无关:向量组中的任一向量都不能被其它向量线性表示,就说向量组线性无关;否则就是线性相关。 矩阵转置:将矩阵的行和列互相交换 矩阵求逆:对于方阵A,若存在方阵B使得AB=BA=单位方阵I,则方阵B为方阵A的逆矩阵,记为A−1A^{-1}A−1","s":"一、基础知识","u":"/docs/推免/数学/线性代数","h":"#一基础知识","p":174},{"i":179,"t":"线性代数中的初等行变换。 交换两行 用非零常数乘以某一行 用一行的倍数加到另一行上 如何理解矩阵的秩。 矩阵的秩是指矩阵的列空间(或行空间)的维数,简而言之是矩阵中所有非零行(或列)向量构成的集合所组成的最大线性无关组的向量个数。 提示 宋浩八字:非零子式的最高阶数 任意矩阵的行秩都等于列秩。 矩阵的秩与线性方程组解的关系。 对于n元线性方程组而言: 当系数矩阵的秩等于增广矩阵的秩且秩等于n时,有唯一解 当系数矩阵的秩等于增广矩阵的秩且秩大于n时,有无穷多解 当系数矩阵的秩不等于增广矩阵的秩时,无解 提示 当系数矩阵的秩小于增广矩阵的秩时,说明系数矩阵中的某一列向量(或行向量)可以被其他列向量(或行向量)线性表示,此时该行不能提供额外的线性独立信息 简述向量组线性无关的含义。 含义:若一个向量组是线性无关的,则该向量组中的每个向量都不能表示成其他向量的线性组合。 意义:如果一个向量组线性无关,那么该向量组所张成的空间就是一个最小维度的向量空间,并且该向量空间中的任何向量都可由这些向量线性组合表示。 判定方法:如果一个向量组中的所有向量都不可以由其他向量线性组合得到,则称该向量组为线性无关的。否则,如果存在某个向量可以表示成其他向量的线性组合,则该向量组就不是线性无关的。 解释正定矩阵以及半正定矩阵。 简述特征值的含义。 特征值描述了矩阵在特定方向(特征向量方向)上的缩放因子,特征向量表示矩阵在这个特定方向上的不变性。 简述矩阵分解的物理意义。 矩阵分解是将一个矩阵表示为一些特定形式的矩阵乘积的过程。 矩阵分解的种类以及物理意义: LU分解:将矩阵分解为一个下三角矩阵和一个上三角矩阵的乘积。物理意义包括解线性方程组、计算矩阵的行列式和逆矩阵等。 QR分解:将矩阵分解为一个正交矩阵和一个上三角矩阵的乘积。物理意义包括最小二乘问题、矩阵的特征值计算等。 特征值分解:将矩阵分解为一个特征向量矩阵和一个对角矩阵的乘积。物理意义包括矩阵的幂、指数和对称矩阵的对角化等。 奇异值分解(SVD):将矩阵分解为一个正交矩阵、一个对角矩阵和一个正交矩阵的乘积。物理意义包括降维、矩阵逼近和图像压缩等。","s":"二、面试常考问题","u":"/docs/推免/数学/线性代数","h":"#二面试常考问题","p":174},{"i":182,"t":"简述大数定理。 大数定理描述了大样本情况下随机变量的均值与其期望值之间的关系。对于独立同分布的随机变量序列,随着样本数量的增加,样本均值会以较高的概率接近其期望值。 简述中心极限定理。 当独立随机变量的数量足够大时,它们的和(或平均值)的分布会逐渐接近一个正态分布。即使原始随机变量不服从正态分布,但当样本容量足够大时,和(或平均值)的分布仍然呈现出正态分布的特征。 什么是全概率公式。 对于事件A而言,假设有一组互斥且穷尽的条件事件B,则事件A的概率等于事件A在每个条件事件下发生的概率与该条件事件发生概率的乘积和。 什么是最大似然估计。 基本思想是在已知观测数据的情况下,通过调整参数的取值,找到使得观测数据出现概率最大的参数值。 大致过程: 构建参数化的概率模型,即构建似然函数,表示在给定参数下观测数据出现的概率 取似然函数的对数,方便计算与优化 最大化似然函数,求解参数的最优值 简述贝叶斯定理。 贝叶斯定理描述了在给定观测数据的条件下,计算事件的后验概率的方法。 P(A∣B)=P(B∣A)∗P(A)P(B)P(A|B) = \\frac{P(B|A) * P(A)}{P(B)}P(A∣B)=P(B)P(B∣A)∗P(A)​ 其中: P(A∣B)P(A|B)P(A∣B)表示在观测到事件 B 发生的条件下,事件 A 发生的概率,称为后验概率 P(B∣A)P(B|A)P(B∣A)表示在事件 A 发生的条件下,事件 B 发生的概率,称为似然; P(A)P(A)P(A)和P(B)P(B)P(B)分别是事件 A 和事件 B 独立发生的先验概率。 优点:它能够将主观先验知识与观测数据相结合,通过不断更新后验概率来进行推断和决策。 P问题、NP问题以及NP完全问题 提示 P stands for Polynomial 意为多项式 P问题是可以在多项式时间内解决的问题 NP问题是可以在多项式时间内验证解的正确性的问题 NP完全问题是一类特殊的NP问题,没有已知的高效解决算法,并且可以在多项式时间内归约到任何其他的NP问题","s":"面试常考问题","u":"/docs/推免/数学/概率论","h":"#面试常考问题","p":180},{"i":185,"t":"线性相关与线性无关:向量组中的任一向量都不能被其它向量线性表示,就说向量组线性无关;否则就是线性相关。 矩阵转置:将矩阵的行和列互相交换 矩阵求逆:对于方阵A,若存在方阵B使得AB=BA=单位方阵I,则方阵B为方阵A的逆矩阵,记为A−1A^{-1}A−1 线性代数中的初等行变换。 交换两行 用非零常数乘以某一行 用一行的倍数加到另一行上 如何理解矩阵的秩。 矩阵的秩是指矩阵的列空间(或行空间)的维数,简而言之是矩阵中所有非零行(或列)向量构成的集合所组成的最大线性无关组的向量个数。 提示 宋浩八字:非零子式的最高阶数 任意矩阵的行秩都等于列秩。 矩阵的秩与线性方程组解的关系。 对于n元线性方程组而言: 当系数矩阵的秩等于增广矩阵的秩且秩等于n时,有唯一解 当系数矩阵的秩等于增广矩阵的秩且秩大于n时,有无穷多解 当系数矩阵的秩不等于增广矩阵的秩时,无解 提示 当系数矩阵的秩小于增广矩阵的秩时,说明系数矩阵中的某一列向量(或行向量)可以被其他列向量(或行向量)线性表示,此时该行不能提供额外的线性独立信息 简述向量组线性无关的含义。 含义:若一个向量组是线性无关的,则该向量组中的每个向量都不能表示成其他向量的线性组合。 意义:如果一个向量组线性无关,那么该向量组所张成的空间就是一个最小维度的向量空间,并且该向量空间中的任何向量都可由这些向量线性组合表示。 
判定方法:如果一个向量组中的所有向量都不可以由其他向量线性组合得到,则称该向量组为线性无关的。否则,如果存在某个向量可以表示成其他向量的线性组合,则该向量组就不是线性无关的。 解释正定矩阵以及半正定矩阵。 简述特征值的含义。 特征值描述了矩阵在特定方向(特征向量方向)上的缩放因子,特征向量表示矩阵在这个特定方向上的不变性。 简述矩阵分解的物理意义。 矩阵分解是将一个矩阵表示为一些特定形式的矩阵乘积的过程。 矩阵分解的种类以及物理意义: LU分解:将矩阵分解为一个下三角矩阵和一个上三角矩阵的乘积。物理意义包括解线性方程组、计算矩阵的行列式和逆矩阵等。 QR分解:将矩阵分解为一个正交矩阵和一个上三角矩阵的乘积。物理意义包括最小二乘问题、矩阵的特征值计算等。 特征值分解:将矩阵分解为一个特征向量矩阵和一个对角矩阵的乘积。物理意义包括矩阵的幂、指数和对称矩阵的对角化等。 奇异值分解(SVD):将矩阵分解为一个正交矩阵、一个对角矩阵和一个正交矩阵的乘积。物理意义包括降维、矩阵逼近和图像压缩等。","s":"一、线性代数","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#一线性代数","p":183},{"i":187,"t":"简述大数定理。 大数定理描述了大样本情况下随机变量的均值与其期望值之间的关系。对于独立同分布的随机变量序列,随着样本数量的增加,样本均值会以较高的概率接近其期望值。 简述中心极限定理。 当独立随机变量的数量足够大时,它们的和(或平均值)的分布会逐渐接近一个正态分布。即使原始随机变量不服从正态分布,但当样本容量足够大时,和(或平均值)的分布仍然呈现出正态分布的特征。 什么是全概率公式。 对于事件A而言,假设有一组互斥且穷尽的条件事件B,则事件A的概率等于事件A在每个条件事件下发生的概率与该条件事件发生概率的乘积和。 什么是最大似然估计。 基本思想是在已知观测数据的情况下,通过调整参数的取值,找到使得观测数据出现概率最大的参数值。 大致过程: 构建参数化的概率模型,即构建似然函数,表示在给定参数下观测数据出现的概率 取似然函数的对数,方便计算与优化 最大化似然函数,求解参数的最优值 简述贝叶斯定理。 贝叶斯定理描述了在给定观测数据的条件下,计算事件的后验概率的方法。 P(A∣B)=P(B∣A)∗P(A)P(B)P(A|B) = \\frac{P(B|A) * P(A)}{P(B)}P(A∣B)=P(B)P(B∣A)∗P(A)​ 其中: P(A∣B)P(A|B)P(A∣B)表示在观测到事件 B 发生的条件下,事件 A 发生的概率,称为后验概率 P(B∣A)P(B|A)P(B∣A)表示在事件 A 发生的条件下,事件 B 发生的概率,称为似然; P(A)P(A)P(A)和P(B)P(B)P(B)分别是事件 A 和事件 B 独立发生的先验概率。 优点:它能够将主观先验知识与观测数据相结合,通过不断更新后验概率来进行推断和决策。 P问题、NP问题以及NP完全问题 提示 P stands for Polynomial 意为多项式 P问题是可以在多项式时间内解决的问题 NP问题是可以在多项式时间内验证解的正确性的问题 NP完全问题是一类特殊的NP问题,没有已知的高效解决算法,并且可以在多项式时间内归约到任何其他的NP问题","s":"二、概率论","u":"/docs/推免/数学/夏令营面试数学部分复习","h":"#二概率论","p":183},{"i":189,"t":"提示 欢迎来到笔记本的推免复习部分","s":"Welcome","u":"/docs/推免/intro","h":"","p":188},{"i":191,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/推免/intro","h":"#支持我","p":188},{"i":193,"t":"提示 设N是一个四位数,它的9倍恰好是其反序数(例如:1234的反序数是4321),求N的值","s":"反序输出","u":"/docs/Algorithms/题解/反序输出","h":"","p":192},{"i":195,"t":"#include using namespace std; int main() { for (int i = 1000; i <= 9999; i++) { int x = i * 9, y = 0; while (x > 0) { y = y * 10 + x % 10; x /= 10; } if (i == y) { cout << i << endl; } } return 0; }","s":"参考代码","u":"/docs/Algorithms/题解/反序输出","h":"#参考代码","p":192},{"i":197,"t":"反序输出可以分为两部分:拆分以及反序拼接 拆分:n位整数求余10可以得到最后一位,再除以10可以得到除去上述最后一位之后的n-1位整数,循环得到每一个最后一位,完成拆分 while (x > 0) { y = y * 10 + x % 10; // 拼接与拆分 x /= 10; } 拼接:将s中的数字拼接成整数 int sum = 0; for (int i = 0; i < s.size(); i++) { sum = sum * 10 + s[i]; }","s":"题解","u":"/docs/Algorithms/题解/反序输出","h":"#题解","p":192},{"i":199,"t":"提示 在一面很长的墙壁上,工人们用不同的油漆去刷墙,然而可能有些地方刷过以后觉得不好看,他们会重新刷一下。有些部分因为重复刷了很多次覆盖了很多层油漆,小诺很好奇那些地方被刷过多少种颜色的油漆。 输入描述: 若干行输入,每行两个数字B[i],E[i](0<=B[i]<=E[i]<=200000)表示这次刷的墙壁是哪一段 (假设每次刷的时候油漆颜色都和之前的不同),以0 0结束 又若干行输入,每行两个数字begin[i],end[i](0<=begin[i]<=end[i]<=200000)表示小诺询问的段, 以0 0结束 输出描述: 对于每个小诺的询问输出(end[i]-begin[i]+1)行,表示对应询问段的每个点被多少种颜色的油漆覆盖过。","s":"一维前缀和(刷出一道墙)","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"","p":198},{"i":201,"t":"#include using namespace std; int main() { vector colors(200001, 0); int B, E; while (scanf(\"%d %d\", &B, &E)) { if (B == 0 && E == 0) { break; } colors[B]++; // 刷墙起点标记 colors[E + 1]--; // 刷墙终点标记 } // 计算前缀和 for (int i = 1; i < colors.size(); i++) { colors[i] += colors[i - 1]; } int begin, end; while (scanf(\"%d %d\", &begin, &end)) { if (begin == 0 && end == 0) { break; } for (int i = begin; i <= end; i++) { printf(\"%d\\n\", colors[i]); } } return 0; }","s":"参考代码","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#参考代码","p":198},{"i":203,"t":"使用前缀和思想简化时间复杂度,设计前缀和数组,使输出的数组中元素的值代表其对应节点被刷的次数。 首先初始化前缀和数组,使每一个元素等于为0。 该题的巧妙之处就在于:对于每一个输入的索引B与E,B作为开始刷的节点索引令前缀和数组中对应元素的值+1+1+1,E+1作为刷墙结束的下一个节点的索引令对应的值−1-1−1。这样在所有输入结束后的计算前缀和阶段,在每一个值为[1,−1)[1, 
-1)[1,−1)的索引区间中的元素值都会加1,而对于某次刷漆终点E的下一个索引为E+1的元素值由于−1-1−1而抵消影响(自身值为−1-1−1加上之前元素所累积的1而归零),此时数组中元素的值才代表其对应节点被刷的次数。 关于超时,可以在函数中加入以下代码消除流操作的缓冲区,并使用\"\\n\"代替endl。 ios::sync_with_stdio(false);","s":"题解","u":"/docs/Algorithms/题解/一维前缀和(刷出一道墙)","h":"#题解","p":198},{"i":205,"t":"提示 输入一个数,比如201,让数字随意组合,是否能组合出30的倍数,如果能够组合成30的倍数,就输出最大的倍数,不能就输出-1 例如输入201可以随意组合成 201,210,012,021,102,120等数字 其中120,210都是30的倍数,由于要找最大的,所以答案是210 输入样例:201 输出样例:210","s":"排列组合(求30的倍数)","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"","p":204},{"i":207,"t":"#include using namespace std; int main() { string s; cin >> s; int maxx = 0, flag = 0; sort(s.begin(), s.end()); do { int now = 0; for (int i = 0; i < s.size(); i++) { now = now * 10 + s[i] - '0'; } if (now % 30 == 0) { flag = 1; maxx = max(maxx, now); } } while (next_permutation(s.begin(), s.end())); if (flag == 1) { cout << maxx << endl; return 0; } else { cout << -1 << endl; } }","s":"参考代码","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#参考代码","p":204},{"i":209,"t":"使用C++ STL提供的排列组合模版 首先将代排列组合的字符串或数组进行排序 sort(list.begin(), list.end()); 使用排列组合模版 do { something(); } while (next_permutation(list.begin(), list.end())); 此时,在每一个do循环中,list按从小到大的顺序进行排列组合遍历","s":"题解","u":"/docs/Algorithms/题解/排列组合(求30的倍数)","h":"#题解","p":204},{"i":211,"t":"vector:变长数组,倍增的思想 pair:存储一对数 string:字符串,substr(), c_str() queue:push(), front(), pop() priority_queue:优先队列,push(), top(), pop() stack:栈,push(), top(), pop() deque:双端队列 set, map, multiset, multimap:基于红黑树来实现,本质上是动态维护一个有序序列 unordered_set, unordered_map, unordered_multiset, unordered_multimap:哈希表 bitset:压位","s":"STL模板","u":"/docs/Algorithms/STL模板","h":"","p":210},{"i":213,"t":"vector a(10,3); // 定义一个长度为10的vector,初始化为3; a.size(); // vector的size,所有容器都有 a.empty(); // 范围vector是否为空,所有容器都有 a.clear(); // 清空 a.front(); // 第一个数 a.back(); // 最后一个数 a.push_back(); // 在最后插入一个数 a.pop_back(); // 删除最后一个数 // vector支持比较运算 vector a(4,3),b(3,4); if(a > b) cout << \"Yes\"; else cout << \"No\"","s":"vector","u":"/docs/Algorithms/STL模板","h":"#vector","p":210},{"i":215,"t":"pair a; a = {20,\"abc\"}; a.first(); // 获取第一个元素 a.second(); // 获取第二个元素 // pair也能进行sort","s":"pair","u":"/docs/Algorithms/STL模板","h":"#pair","p":210},{"i":217,"t":"string a = \"Acwing\"; a.size(); // 获取string的大小 a.empty(); // 判断是否为空 a.clear(); // 清空 a += \"def\"; cout << a. substr(1,2) << endl; // 第一个参数起始位置,第二个参数是字符串长度","s":"string","u":"/docs/Algorithms/STL模板","h":"#string","p":210},{"i":219,"t":"query a; a.size(); a.empty(); a.push(1); // 队尾插入元素 a.front(); // 返回队头元素 a.back(); // 返回队尾元素 a.pop(); // 删除队头元素","s":"query","u":"/docs/Algorithms/STL模板","h":"#query","p":210},{"i":221,"t":"// 默认是大根堆 priority_queue heap; heap.clear(); heap.size(); heap.empty(); // 如何定义一个小根堆: 1. 插入负数 2. 
直接定义 heap.push(-x); // 黑科技方法 priority_queue,greater> q;","s":"priority_queue","u":"/docs/Algorithms/STL模板","h":"#priority_queue","p":210},{"i":223,"t":"stack s; s.size(); s.empty(); s.push(); s.top(); s.pop();","s":"stack","u":"/docs/Algorithms/STL模板","h":"#stack","p":210},{"i":225,"t":"deque a; a.size(); a.empty(); a.clear(); a.front(); a.back(); a.push_back(); a.pop_back();","s":"deque","u":"/docs/Algorithms/STL模板","h":"#deque","p":210},{"i":227,"t":"set s; // 不能有重复元素 // s.begin()/end() multiset MS; // 可以有重复元素 s.insert(1); 插入一个数 s.size(); s.empty(); s.clear(); s.find(1); // 查找一个元素,如果不存在的话返回end迭代器 s.erase(1); // 输入是一个数x,输出所有x (2)输入一个迭代器,删除这个迭代器 // set 最核心的操作 s.lower_bound(); // 范围大于等于x的最小的数 s.upper_bound(); // 返回大于x的最小的数","s":"set/multiset","u":"/docs/Algorithms/STL模板","h":"#setmultiset","p":210},{"i":229,"t":"#include // 和python里面的字典非常的相似 map a; a[\"2\"] = 3; a.insert({\"1\",1}); a.erase({\"1\",1}); a.find({\"1\",1}); unordered_set, unordered_map, unordered_multiset, unordered_multimap的操作和set或者map等的操作基本一致,唯一的区别就是不支持类似lower_bound()这样的操作 (哈希表的内部是无序的)","s":"map/multimap","u":"/docs/Algorithms/STL模板","h":"#mapmultimap","p":210},{"i":231,"t":"可以省下来8位的空间 bitset<10000> s; // 支持所有的基本操作: // 移位操作:<< >> // == != // count() 返回有多少个1 // any() 判断是否至少有一个1 // none() 判断是否全为0 // set(),把所有为置为1 // set(k,v), 将第k个变为v // reset(), 把所有位变成0 // flip(), 把所有位取反","s":"biset","u":"/docs/Algorithms/STL模板","h":"#biset","p":210},{"i":233,"t":"提示 欢迎来到笔记本的算法部分","s":"Welcome","u":"/docs/Algorithms/intro","h":"","p":232},{"i":235,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Algorithms/intro","h":"#支持我","p":232},{"i":238,"t":"卷积层会对输入的局部区域进行卷积操作,因此对于输入图像中的每个位置都会产生一个响应。然而,在某些情况下,我们并不关心输入图像中每个位置的细节,而只是想获取该区域的一些重要特征。 假设我们想分类一张猫的图片,那么我们可能只需要提取出它的眼睛、鼻子、嘴巴和耳朵等特征,而不必考虑这些特征在图像中的精确位置。","s":"一、卷积对像素位置信息是敏感的","u":"/docs/Deep Learning/基础知识/池化层","h":"#一卷积对像素位置信息是敏感的","p":236},{"i":240,"t":"池化层通过对输入的局部区域进行降采样操作,减少了特征图的大小,从而使得模型对于输入位置的微小变化更加鲁棒。例如,如果我们将一个对象稍微平移一点,它依然可以被正确地识别,因为池化层可以保留输入图像的关键特征,而忽略掉微小的位置变化。 但是需要注意的是,当池化的步幅和池化区域的大小过大时,会导致模型丢失较多的细节信息,从而影响模型性能。因此,在实际应用中,需要根据具体任务来选择适当的池化参数。 缓解卷积层对位置的敏感性,提高鲁棒:池化操作通常用于卷积层之后,使模型对于输入位置的微小变化更加鲁棒,减少图像中的噪声和冗余信息 减小特征图大小:池化操作会通过在特定位置上合并特征值来缩小输入特征图的空间大小,降低计算开销。 减少参数数量:池化操作减小了特征图的空间大小,从而也减小了需要训练的权重参数数量,更容易训练和优化。","s":"二、池化层的作用","u":"/docs/Deep Learning/基础知识/池化层","h":"#二池化层的作用","p":236},{"i":242,"t":"池化层将输入特征图分割成若干个区域,然后对每个区域进行汇聚操作,将该区域内的特征值合并成一个值。这个操作可以使用不同的方法实现,如最大值池化、平均值池化等。 最常见的是最大值池化,其中每个区域的输出值是该区域内特征值的最大值,这样可以保留图像中最显著的特征,同时减少噪声和冗余信息的影响。","s":"三、池化的实现","u":"/docs/Deep Learning/基础知识/池化层","h":"#三池化的实现","p":236},{"i":244,"t":"[TOC]","s":"机试技巧与STL","u":"/docs/Algorithms/机试技巧与STL","h":"","p":243},{"i":246,"t":"CTRL + J 列出成员 Ctrl+E,D 格式化全部代码 Ctrl+K,F 格式化选中的代码 CTRL + SHIFT + E 显示资源视图 F12 转到定义 CTRL + F12 转到声明 CTRL + ALT + J 对象浏览 CTRL + ALT + F1 帮助目录 CTRL + F1 动态帮助 CTRL + K, CTRL + C 注释选择的代码 CTRL + K, CTRL + U 取消对选择代码的注释 CTRL + U 转小写 CTRL + SHIFT + U 转大写 F5 运行调试 CTRL + F5 运行不调试 F10 跨过程序执行 F11 单步逐句执行","s":"vs2018 快捷键","u":"/docs/Algorithms/机试技巧与STL","h":"#vs2018-快捷键","p":243},{"i":249,"t":"头文件 说明 头文件 说明 头文件 说明 assert.h 断言相关 ctype.h 字符类型判断 errno.h 标准错误机制 float.h 浮点限制 limits.h 整形限制 locale.h 本地化接口 math.h 数学函数 setjmp.h 非本地跳转 signal.h 信号相关 stdarg.h 可变参数处理 stddef.h 宏和类型定义 stdio.h 标准I/O stdlib.h 标准工具库 string.h 字符串和内存处理 time.h 时间相关","s":"标准c库","u":"/docs/Algorithms/机试技巧与STL","h":"#标准c库","p":243},{"i":251,"t":"using namespace std; 头文件 说明 头文件 说明 头文件 说明 algorithm 通用算法 deque 双端队列 vector 向量 iterator 迭代器 stack 栈 map 图(键值对) list 列表 string 字符串 set 集合 queue 队列 bitset bit类 numeric 数值算法","s":"c++ 
STL","u":"/docs/Algorithms/机试技巧与STL","h":"#c-stl","p":243},{"i":253,"t":"#include #include #include #include #include #include #include #include #include #include #include #include #include #include #include using namespace std;","s":"常用头","u":"/docs/Algorithms/机试技巧与STL","h":"#常用头","p":243},{"i":255,"t":"//求最大值和最小值 #define MAX(x,y) (((x)>(y)) ? (x) : (y)) #define MIN(x,y) (((x) < (y)) ? (x) : (y)) //取余 #define mod(x) ((x)%MOD) //for循环 #define FOR(i,f_start,f_end) for(int i=f_start;i<=f_end;++i) //返回数组元素的个数 #define ARR_SIZE(a) (sizeof((a))/sizeof((a[0]))) //初始化数组 #define MT(x,i) memset(x,i,sizeof(x)) #define MEM(a,b) memset((a),(b),sizeof(a)) //符号重定义 #define LL long long #define ull unsigned long long #define pii pair //常见常数 #define PI acos(-1.0) #define eps 1e-12 #define INF 0x3f3f3f3f //int最大值 const int INF_INT = 2147483647; const ll INF_LL = 9223372036854775807LL; const ull INF_ULL = 18446744073709551615Ull; const ll P = 92540646808111039LL; const ll maxn = 1e5 + 10, MOD = 1e9 + 7; const int Move[4][2] = {-1,0,1,0,0,1,0,-1}; const int Move_[8][2] = {-1,-1,-1,0,-1,1,0,-1,0,1,1,-1,1,0,1,1};","s":"常用宏定义","u":"/docs/Algorithms/机试技巧与STL","h":"#常用宏定义","p":243},{"i":258,"t":"struct InitMember { int first; double second; char* third; float four; };","s":"定义","u":"/docs/Algorithms/机试技巧与STL","h":"#定义","p":243},{"i":260,"t":"方法一:定义时赋值​ struct InitMember test = {-10,3.141590,\"method one\",0.25}; 方法二:定义后逐个赋值​ struct InitMember test; test.first = -10; test.second = 3.141590; test.third = \"method two\"; test.four = 0.25; 方法三:定义时乱序赋值(C++风格)​ struct InitMember test = { second:3.141590, third:\"method three\", first:-10, four:0.25 }; 方法四:构造函数​ //定义图的定点 typedef struct Vertex { int id,inDegree,outDegree; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1),inDegree(0),outDegree(0) {} Vertex(int nid) : id(nid),inDegree(0),outDegree(0) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } Graph() : nVertexs(1), isDAG(1) { vertexs.resize(1); } //向图中添加边 bool addEdge(int id1, int id2) { ... ... ... 
return true; } } Graph; Graph g(8, false);","s":"初始化","u":"/docs/Algorithms/机试技巧与STL","h":"#初始化","p":243},{"i":262,"t":"typedef struct{int id;int h;} node; bool operator <(const node& a,const node & b){return (a.h)<(b.h);}","s":"运算符重载","u":"/docs/Algorithms/机试技巧与STL","h":"#运算符重载","p":243},{"i":265,"t":"int *x = new int; //开辟一个存放整数的存储空间,返回一个指向该存储空间的地址(即指针) int *a = new int(100); //开辟一个存放整数的空间,并指定该整数的初值为100,返回一个指向该存储空间的地址 char *b = new char[10]; //开辟一个存放字符数组(包括10个元素)的空间,返回首元素的地址 float *p=new float (3.14159);//开辟一个存放单精度数的空间,并指定该实数的初值为//3.14159,将返回的该空间的地址赋给指针变量p","s":"常规","u":"/docs/Algorithms/机试技巧与STL","h":"#常规","p":243},{"i":267,"t":"//列值固定 const int MAXCOL = 3; cin>>row; //申请一维数据并将其转成二维数组指针 int *pp_arr = new int[nRow * MAXCOL]; int (*p)[MAXCOL] = (int(*)[MAXCOL])pp_arr; //此时p[i][j]就可正常使用","s":"动态申请列大小固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请列大小固定的二维数组","p":243},{"i":269,"t":"cin>>row>>col; int **p = new int*[row]; for (int i = 0; i < row; i ++) { p[i] = new int[col]; }","s":"动态申请大小不固定的二维数组","u":"/docs/Algorithms/机试技巧与STL","h":"#动态申请大小不固定的二维数组","p":243},{"i":271,"t":"参考: https://blog.csdn.net/f_zyj/article/details/51594851 https://download.csdn.net/download/f_zyj/9988653","s":"常用STL","u":"/docs/Algorithms/机试技巧与STL","h":"#常用stl","p":243},{"i":273,"t":"STL底层说明​ C++ STL 的实现: 1.vector 底层数据结构为数组 ,支持快速随机访问 2.list 底层数据结构为双向链表,支持快速增删 3.deque 底层数据结构为一个中央控制器和多个缓冲区,详细见STL源码剖析P146,支持首尾(中间不能)快速增删,也支持随机访问 deque是一个双端队列(double-ended queue),也是在堆中保存内容的.它的保存形式如下: [堆1] --> [堆2] -->[堆3] --> ... 每个堆保存好几个元素,然后堆和堆之间有指针指向,看起来像是list和vector的结合品. 4.stack 底层一般用list或deque实现,封闭头部即可,不用vector的原因应该是容量大小有限制,扩容耗时 5.queue 底层一般用list或deque实现,封闭头部即可,不用vector的原因应该是容量大小有限制,扩容耗时 (stack和queue其实是适配器,而不叫容器,因为是对容器的再封装) 6.priority_queue 的底层数据结构一般为vector为底层容器,堆heap为处理规则来管理底层容器实现 7.set 底层数据结构为红黑树,有序,不重复 8.multiset 底层数据结构为红黑树,有序,可重复 9.map 底层数据结构为红黑树,有序,不重复 10.multimap 底层数据结构为红黑树,有序,可重复 11.hash_set 底层数据结构为hash表,无序,不重复 12.hash_multiset 底层数据结构为hash表,无序,可重复 13.hash_map 底层数据结构为hash表,无序,不重复 14.hash_multimap 底层数据结构为hash表,无序,可重复 CCF 编译出错原因: 不允许C++STL容器嵌套(需要满足相应的格式)​ 就是要在后面的“>”之间,必须得有一个空格,如果有多层,那每层都得有一个空格。 map > user;","s":"简述","u":"/docs/Algorithms/机试技巧与STL","h":"#简述","p":243},{"i":275,"t":"头文件:lgorithm 函数参数,返回值以及具体的使用方法请自行去头文件找定义!!! 
不修改内容的序列操作​ 函数 说明 adjacent_find 查找两个相邻(Adjacent)的等价(Identical)元素 all_ofC++11 检测在给定范围中是否所有元素都满足给定的条件 any_ofC++11 检测在给定范围中是否存在元素满足给定条件 count 返回值等价于给定值的元素的个数 count_if 返回值满足给定条件的元素的个数 equal 返回两个范围是否相等 find 返回第一个值等价于给定值的元素 find_end 查找范围A中与范围B等价的子范围最后出现的位置 find_first_of 查找范围A中第一个与范围B中任一元素等价的元素的位置 find_if 返回第一个值满足给定条件的元素 find_if_notC++11 返回第一个值不满足给定条件的元素 for_each 对范围中的每个元素调用指定函数 mismatch 返回两个范围中第一个元素不等价的位置 none_ofC++11 检测在给定范围中是否不存在元素满足给定的条件 search 在范围A中查找第一个与范围B等价的子范围的位置 search_n 在给定范围中查找第一个连续n个元素都等价于给定值的子范围的位置 修改内容的序列操作​ 函数 说明 copy 将一个范围中的元素拷贝到新的位置处 copy_backward 将一个范围中的元素按逆序拷贝到新的位置处 copy_ifC++11 将一个范围中满足给定条件的元素拷贝到新的位置处 copy_nC++11 拷贝 n 个元素到新的位置处 fill 将一个范围的元素赋值为给定值 fill_n 将某个位置开始的 n 个元素赋值为给定值 generate 将一个函数的执行结果保存到指定范围的元素中,用于批量赋值范围中的元素 generate_n 将一个函数的执行结果保存到指定位置开始的 n 个元素中 iter_swap 交换两个迭代器(Iterator)指向的元素 moveC++11 将一个范围中的元素移动到新的位置处 move_backwardC++11 将一个范围中的元素按逆序移动到新的位置处 random_shuffle 随机打乱指定范围中的元素的位置 remove 将一个范围中值等价于给定值的元素删除 remove_if 将一个范围中值满足给定条件的元素删除 remove_copy 拷贝一个范围的元素,将其中值等价于给定值的元素删除 remove_copy_if 拷贝一个范围的元素,将其中值满足给定条件的元素删除 replace 将一个范围中值等价于给定值的元素赋值为新的值 replace_copy 拷贝一个范围的元素,将其中值等价于给定值的元素赋值为新的值 replace_copy_if 拷贝一个范围的元素,将其中值满足给定条件的元素赋值为新的值 replace_if 将一个范围中值满足给定条件的元素赋值为新的值 reverse 反转排序指定范围中的元素 reverse_copy 拷贝指定范围的反转排序结果 rotate 循环移动指定范围中的元素 rotate_copy 拷贝指定范围的循环移动结果 shuffleC++11 用指定的随机数引擎随机打乱指定范围中的元素的位置 swap 交换两个对象的值 swap_ranges 交换两个范围的元素 transform 对指定范围中的每个元素调用某个函数以改变元素的值 unique 删除指定范围中的所有连续重复元素,仅仅留下每组等值元素中的第一个元素。 unique_copy 拷贝指定范围的唯一化(参考上述的 unique)结果 划分操作​ 函数 说明 is_partitionedC++11 检测某个范围是否按指定谓词(Predicate)划分过 partition 将某个范围划分为两组 partition_copyC++11 拷贝指定范围的划分结果 partition_pointC++11 返回被划分范围的划分点 stable_partition 稳定划分,两组元素各维持相对顺序 排序操作​ 函数 说明 is_sortedC++11 检测指定范围是否已排序 is_sorted_untilC++11 返回最大已排序子范围 nth_element 部份排序指定范围中的元素,使得范围按给定位置处的元素划分 partial_sort 部份排序 partial_sort_copy 拷贝部分排序的结果 sort 排序 stable_sort 稳定排序 二分法查找操作​ 函数 说明 binary_search 判断范围中是否存在值等价于给定值的元素 equal_range 返回范围中值等于给定值的元素组成的子范围 lower_bound 返回指向范围中第一个值大于或等于给定值的元素的迭代器 upper_bound 返回指向范围中第一个值大于给定值的元素的迭代器 集合操作​ 函数 说明 includes 判断一个集合是否是另一个集合的子集 inplace_merge 就绪合并 merge 合并 set_difference 获得两个集合的差集 set_intersection 获得两个集合的交集 set_symmetric_difference 获得两个集合的对称差 set_union 获得两个集合的并集 堆操作​ 函数 说明 is_heap 检测给定范围是否满足堆结构 is_heap_untilC++11 检测给定范围中满足堆结构的最大子范围 make_heap 用给定范围构造出一个堆 pop_heap 从一个堆中删除最大的元素 push_heap 向堆中增加一个元素 sort_heap 将满足堆结构的范围排序 最大/最小操作​ 函数 说明 is_permutationC++11 判断一个序列是否是另一个序列的一种排序 lexicographical_compare 比较两个序列的字典序 max 返回两个元素中值最大的元素 max_element 返回给定范围中值最大的元素 min 返回两个元素中值最小的元素 min_element 返回给定范围中值最小的元素 minmaxC++11 返回两个元素中值最大及最小的元素 minmax_elementC++11 返回给定范围中值最大及最小的元素 next_permutation 返回给定范围中的元素组成的下一个按字典序的排列 prev_permutation 返回给定范围中的元素组成的上一个按字典序的排列","s":"algorithm","u":"/docs/Algorithms/机试技巧与STL","h":"#algorithm","p":243},{"i":277,"t":"头文件:vector 在STL的vector头文件中定义了vector(向量容器模版类),vector容器以连续数组的方式存储元素序列,可以将vector看作是以顺序结构实现的线性表。当我们在程序中需要使用动态数组时,vector将会是理想的选择,vector可以在使用过程中动态地增长存储空间。 vector模版类需要两个模版参数,第一个参数是存储元素的数据类型,第二个参数是存储分配器的类型,其中第二个参数是可选的,如果不给出第二个参数,将使用默认的分配器 下面给出几个常用的定义vector向量对象的方法示例: vector s; // 定义一个空的vector对象,存储的是int类型的元素 vector s(n); // 定义一个含有n个int元素的vector对象 vector s(first, last); // 定义一个vector对象,并从由迭代器first和last定义的序列[first, last)中复制初值 vector的基本操作: s[i] // 直接以下标方式访问容器中的元素 s.front() // 返回首元素 s.back() // 返回尾元素 s.push_back(x) // 向表尾插入元素x s.size() // 返回表长 s.empty() // 表为空时,返回真,否则返回假 s.pop_back() // 删除表尾元素 s.begin() // 返回指向首元素的随机存取迭代器 s.end() // 返回指向尾元素的下一个位置的随机存取迭代器 s.insert(it, val) // 向迭代器it指向的元素前插入新元素val s.insert(it, n, val)// 向迭代器it指向的元素前插入n个新元素val s.insert(it, first, last) // 将由迭代器first和last所指定的序列[first, 
last)插入到迭代器it指向的元素前面 s.erase(it) // 删除由迭代器it所指向的元素 s.erase(first, last)// 删除由迭代器first和last所指定的序列[first, last) s.reserve(n) // 预分配缓冲空间,使存储空间至少可容纳n个元素 s.resize(n) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),元素默认值将填满扩展出的空间 s.resize(n, val) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),val将填满扩展出的空间 s.clear() // 删除容器中的所有元素 s.swap(v) // 将s与另一个vector对象进行交换 s.assign(first, last) // 将序列替换成由迭代器first和last所指定的序列[first, last),[first, last)不能是原序列中的一部分 // 要注意的是,resize操作和clear操作都是对表的有效元素进行的操作,但并不一定会改变缓冲空间的大小 // 另外,vector还有其他的一些操作,如反转、取反等,不再一一列举 // vector上还定义了序列之间的比较操作运算符(>、<、>=、<=、==、!=),可以按照字典序比较两个序列。 // 还是来看一些示例代码吧…… /* * 输入个数不定的一组整数,再将这组整数按倒序输出 */ #include #include using namespace std; int main() { vector L; int x; while(cin >> x) { L.push_back(x); } for (int i = L.size() - 1; i >= 0; i--) { cout << L[i] << \" \"; } cout << endl; return 0; }","s":"vector","u":"/docs/Algorithms/机试技巧与STL","h":"#vector","p":243},{"i":279,"t":"头文件:list 下面给出几个常用的定义list对象的方法示例: lista{1,2,3} lista(n) //声明一个n个元素的列表,每个元素都是0 lista(n, m) //声明一个n个元素的列表,每个元素都是m lista(first, last) //声明一个列表,其元素的初始值来源于由区间所指定的序列中的元素,first和last是迭代器 list的基本操作: a.begin() // 返回指向首元素的随机存取迭代器 a.end() // 返回指向尾元素的下一个位置的随机存取迭代器 a.push_front(x) // 向表头插入元素x a.push_back(x) // 向表尾插入元素x a.pop_back() // 删除表尾元素 a.pop_front() // 删除表头元素 a.size() // 返回表长 a.empty() // 表为空时,返回真,否则返回假 a.resize(n) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),元素默认值将填满扩展出的空间 a.resize(n, val) // 改变序列长度,超出的元素将会全部被删除,如果序列需要扩展(原空间小于n),val将填满扩展出的空间 a.clear() // 删除容器中的所有元素 a.front() // 返回首元素 a.back() // 返回尾元素 a.swap(v) // 将a与另一个list对象进行交换 a.merge(b) // 调用结束后b变为空,a中元素包含原来a和b的元素 a.insert(it, val) // 向迭代器it指向的元素前插入新元素val a.insert(it, n, val)// 向迭代器it指向的元素前插入n个新元素val a.insert(it, first, last) // 将由迭代器first和last所指定的序列[first, last)插入到迭代器it指向的元素前面 a.erase(it) // 删除由迭代器it所指向的元素 a.erase(first, last)// 删除由迭代器first和last所指定的序列[first, last) a.remove(x) // 删除了a中所有值为x的元素 a.assign(n, val) // 将a中的所有元素替换成n个val元素 a.assign(b.begin(), b.end()) //将a变成b","s":"list","u":"/docs/Algorithms/机试技巧与STL","h":"#list","p":243},{"i":281,"t":"头文件:string string是STL的字符串类型,通常用来表示字符串。而在使用string之前,字符串通常是用char*表示的。 string和char*的区别 string是一个类, char*是一个指向字符的指针。 string封装了char*,管理这个字符串,是一个char*型的容器。也就是说string是一个容器,里面元素的数据类型是char*。 string不用考虑内存释放和越界。 string管理char*所分配的内存。每一次string的复制,取值都由string类负责维护,不用担心复制越界和取值越界等。 string提供了一系列的字符串操作函数 查找find,拷贝copy,删除erase,替换replace,插入insert. 
构造和析构函数: 表达式 效果 string s 生成一个空字符串 string s(str) copy构造函数,生成一个str的复制品 string s(str,idx) 将string内始于位置idx的部分当作字符串s的初值 string s(str,idx,len) 将string内始于位置idx且长度最多为len的部分当作字符串s的初值 string s(cstr) 以C-string字符串cstr作为字符串s的初值 string s(cstr,len) 以C-string字符串cstr的前len个字符作为字符串s的初值 string s(num,c) 生成一个字符串,包含num个字符c string s(beg,end) 以区间[beg,end]内所有字符作为字符串s的初值 操作函数: 操作函数 效果 =,assign() 赋以新值 swap() 交换两个字符串的内容 +=, append(),push_back() 添加字符 insert() 插入字符 erase() 删除字符 clear() 移除全部字符 resize() 改变字符数量 replace() 替换字符 + 串联字符串 ==,!=,<,<=,>,>=,compare() 比较字符串内容 size(),length() 返回字符数量,等效函数 max_size() 返回字符的最大可能个数 empty() 判断字符串是否为空 capacity() 返回重新分配之前的字符容量 reserve() 保留一定量内存以容纳一定数量的字符 [ ],at() 存取单一字符 >>,getline() 从stream中读取某值 << 将某值写入stream copy() 将内容复制为一个C-string c_str() 将内容以C-string形式返回 data() 将内容以字符数组形式返回 substr() 返回某个子字符串 begin(),end() 提供正常的迭代器支持 rbegin(),rend() 提供逆向迭代器支持","s":"string","u":"/docs/Algorithms/机试技巧与STL","h":"#string","p":243},{"i":283,"t":"头文件:utility STL的utility头文件中描述了一个看上去非常简单的模版类pair,用来表示一个二元组或元素对,并提供了按照字典序对元素对进行大小比较运算符模版函数。 Example,想要定义一个对象表示一个平面坐标点,则可以: pair p; cin >> p.first >> p.second; pair模版类需要两个参数:首元素的数据类型和尾元素的数据类型。pair模版类对象有两个成员:first和second,分别表示首元素和尾元素。 在其中已经定义了pair上的六个比较运算符:<、>、<=、>=、==、!=,其规则是先比较first,first相等时再比较second,这符合大多数应用的逻辑。当然,也可以通过重载这几个运算符来重新指定自己的比较逻辑。 除了直接定义一个pair对象外,如果需要即时生成一个pair对象,也可以调用在其中定义的一个模版函数:make_pair。make_pair需要两个参数,分别为元素对的首元素和尾元素。","s":"pair","u":"/docs/Algorithms/机试技巧与STL","h":"#pair","p":243},{"i":285,"t":"头文件:map 在STL的头文件中map中定义了模版类map和multimap,用有序二叉树表存储类型为pair的元素对序列。序列中的元素以const Key部分作为标识,map中所有元素的Key值必须是唯一的,multimap则允许有重复的Key值。 可以将map看作是由Key标识元素的元素集合,这类容器也被称为“关联容器”,可以通过一个Key值来快速决定一个元素,因此非常适合于需要按照Key值查找元素的容器。 map模版类需要四个模版参数,第一个是键值类型,第二个是元素类型,第三个是比较算子,第四个是分配器类型。其中键值类型和元素类型是必要的。 定义map对象的代码示例: map m; map的基本操作: /* 向map中插入元素 */ m[key] = value; // [key]操作是map很有特色的操作,如果在map中存在键值为key的元素对, 则返回该元素对的值域部分,否则将会创建一个键值为key的元素对,值域为默认值。所以可以用该操作向map中插入元素对或修改已经存在的元素对的值域部分。 m.insert(make_pair(key, value)); // 也可以直接调用insert方法插入元素对,insert操作会返回一个pair,当map中没有与key相匹配的键值时,其first是指向插入元素对的迭代器,其second为true;若map中已经存在与key相等的键值时,其first是指向该元素对的迭代器,second为false。 /* 查找元素 */ int i = m[key]; // 要注意的是,当与该键值相匹配的元素对不存在时,会创建键值为key(当另一个元素是整形时,m[key]=0)的元素对。 map::iterator it = m.find(key); // 如果map中存在与key相匹配的键值时,find操作将返回指向该元素对的迭代器,否则,返回的迭代器等于map的end()(参见vector中提到的begin()和end()操作)。 /* 删除元素 */ m.erase(key); // 删除与指定key键值相匹配的元素对,并返回被删除的元素的个数。 m.erase(it); // 删除由迭代器it所指定的元素对,并返回指向下一个元素对的迭代器。 /* 其他操作 */ m.size(); // 返回元素个数 m.empty(); // 判断是否为空 m.clear(); // 清空所有元素","s":"map","u":"/docs/Algorithms/机试技巧与STL","h":"#map","p":243},{"i":287,"t":"头文件:stack stack模版类的定义在stack头文件中。 stack模版类需要两个模版参数,一个是元素类型,另一个是容器类型,但是只有元素类型是必要的,在不指定容器类型时,默认容器的类型为deque。 定义stack对象的示例代码如下: stack s; stack ss; stack的基本操作有: s.push(x); // 入栈 s.pop(); // 出栈 s.top(); // 访问栈顶 s.empty(); // 当栈空时,返回true s.size(); // 访问栈中元素个数","s":"stack","u":"/docs/Algorithms/机试技巧与STL","h":"#stack","p":243},{"i":289,"t":"头文件:queue queue模版类的定义在queue头文件中。 queue与stack相似,queue模版类也需要两个模版参数,一个元素类型,一个容器类型,元素类型时必须的,容器类型时可选的,默认为deque类型。 定义queue对象的示例代码必须如下: queue q; queue qq; queue的基本操作: q.push(x); // 入队列 q.pop(); // 出队列 q.front(); // 访问队首元素 q.back(); // 访问队尾元素 q.empty(); // 判断队列是否为空 q.size(); // 访问队列中的元素个数","s":"queue","u":"/docs/Algorithms/机试技巧与STL","h":"#queue","p":243},{"i":291,"t":"头文件:set set是与集合相关的容器,STL为我们提供了set的实现,在编程题中遇见集合问题直接调用是十分方便的。 定义set对象的示例代码如下: set s; set ss; set的基本操作: s.begin() // 返回指向第一个元素的迭代器 s.clear() // 清除所有元素 s.count() // 返回某个值元素的个数 s.empty() // 如果集合为空,返回true(真) s.end() // 返回指向最后一个元素之后的迭代器,不是最后一个元素 s.equal_range() // 返回集合中与给定值相等的上下限的两个迭代器 s.erase() // 
删除集合中的元素 s.find() // 返回一个指向被查找到元素的迭代器 s.get_allocator() // 返回集合的分配器 s.insert() // 在集合中插入元素 s.lower_bound() // 返回指向大于(或等于)某值的第一个元素的迭代器 s.key_comp() // 返回一个用于元素间值比较的函数 s.max_size() // 返回集合能容纳的元素的最大限值 s.rbegin() // 返回指向集合中最后一个元素的反向迭代器 s.rend() // 返回指向集合中第一个元素的反向迭代器 s.size() // 集合中元素的数目 s.swap() // 交换两个集合变量 s.upper_bound() // 返回大于某个值元素的迭代器 s.value_comp() // 返回一个用于比较元素间的值的函数","s":"set","u":"/docs/Algorithms/机试技巧与STL","h":"#set","p":243},{"i":293,"t":"头文件:set 在set头文件中,还定义了另一个非常实用的模版类multiset(多重集合)。多重集合与集合的区别在于集合中不能存在相同元素,而多重集合中可以存在。 定义multiset对象的示例代码如下: multiset s; multiset ss; multiset和set的基本操作相似,需要注意的是,集合的count()能返回0(无)或者1(有),而多重集合是有多少个返回多少个。","s":"multiset","u":"/docs/Algorithms/机试技巧与STL","h":"#multiset","p":243},{"i":295,"t":"头文件:bitset 在 STLSTL 的头文件中 bitset中定义了模版类 bitsetbitset,用来方便地管理一系列的 bitbit 位的类。bitsetbitset 除了可以访问指定下标的 bitbit 位以外,还可以把它们作为一个整数来进行某些统计。 bitsetbitset 模板类需要一个模版参数,用来明确指定含有多少位。 定义 bitsetbitset 对象的示例代码: const int MAXN = 32; bitset bt; // bt 包括 MAXN 位,下标 0 ~ MAXN - 1,默认初始化为 0 bitset bt1(0xf); // 0xf 表示十六进制数 f,对应二进制 1111,将 bt1 低 4 位初始化为 1 bitset bt2(012); // 012 表示八进制数 12,对应二进制 1010,即将 bt2 低 4 位初始化为 1010 bitset bt3(\"1010\"); // 将 bt3 低 4 位初始化为 1010 bitset bt4(s, pos, n);// 将 01 字符串 s 的 pos 位开始的 n 位初始化 bt4 bitsetbitset 基本操作: bt.any() // bt 中是否存在置为 1 的二进制位? bt.none() // bt 中不存在置为 1 的二进制位吗? bt.count() // bt 中置为 1 的二进制位的个数 bt.size() // bt 中二进制位的个数 bt[pos] // 访问 bt 中在 pos 处的二进制位 bt.test(pos) // bt 中在 pos 处的二进制位是否为 1 bt.set() // 把 bt 中所有二进制位都置为 1 bt.set(pos) // 把 bt 中在 pos 处的二进制位置为 1 bt.reset() // 把 bt 中所有二进制位都置为 0 bt.reset(pos) // 把 bt 中在pos处的二进制位置为0 bt.flip() // 把 bt 中所有二进制位逐位取反 bt.flip(pos) // 把 bt 中在 pos 处的二进制位取反 bt[pos].flip() // 同上 bt.to_ulong() // 用 bt 中同样的二进制位返回一个 unsigned long 值 os << bt // 把 bt 中的位集输出到 os 流","s":"bitset","u":"/docs/Algorithms/机试技巧与STL","h":"#bitset","p":243},{"i":298,"t":"#include #include #include using namespace std; #define MAX(a, b) ((a) > (b) ? 
(a) : (b) ) //定义图的定点 typedef struct Vertex { int id; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1) {} Vertex(int nid) : id(nid) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } //向图中添加边 bool addEdge(int id1, int id2) { if (!(MAX(id1, id2) < vertexs.size())) return false; if (isDAG) { vertexs[id1].connectors.push_back(id2); } else { vertexs[id1].connectors.push_back(id2); vertexs[id2].connectors.push_back(id1); } return true; } //广度优先搜索 vector BFS(int start) { set visited; vector g, rst; g.push_back(start); visited.insert(start); while(g.size() > 0) { int id = g[0]; g.erase(g.begin()); rst.push_back(id); for(int i = 0; i < vertexs[id].connectors.size(); i++) { int id1 = vertexs[id].connectors[i]; if (visited.count(id1) == 0) { g.push_back(id1); visited.insert(id1); } } } return rst; } //深度优先搜索 vector DFS(int start) { set visited; vector g, rst; g.push_back(start); //cout << \"push \" << start << \" \"; visited.insert(start); rst.push_back(start); bool found; while(g.size() > 0) { int id = g[g.size()-1]; found = false; for(int i = 0; i < vertexs[id].connectors.size(); i++) { int id1 = vertexs[id].connectors[i]; if (visited.count(id1) == 0) { g.push_back(id1); rst.push_back(id1); visited.insert(id1); //cout << \"push \" << id1 << \" \"; found = true; break; } } if (!found) { int id2 = g[g.size()-1]; rst.push_back(-1 * id2); //cout << \"pop \" << id2 << \" \"; g.pop_back(); } } //cout << endl; return rst; } } Graph; int main() { Graph g(8, false); g.addEdge(0, 1); g.addEdge(0, 3); g.addEdge(1, 2); g.addEdge(3, 4); g.addEdge(3, 5); g.addEdge(4, 5); g.addEdge(4, 6); g.addEdge(5, 6); g.addEdge(5, 7); g.addEdge(6, 7); vector bv = g.BFS(0); cout << \"宽度优先搜索节点顺序:\"; for(int j = 0; j < bv.size(); j++) cout << bv[j] << \" \"; cout << endl; cout << \"深度优先搜索节点顺序:\"; Graph g1(6, false); g1.addEdge(0, 1); g1.addEdge(0, 4); g1.addEdge(0, 5); g1.addEdge(1, 5); g1.addEdge(4, 5); g1.addEdge(5, 2); g1.addEdge(5, 3); g1.addEdge(2, 3); vector route = g1.DFS(0); for(int i = 0; i < route.size(); i++) cout << route[i] << \" \"; cout << endl; char ch; cin >> ch; return 0; }","s":"不带出入度的最简模板","u":"/docs/Algorithms/机试技巧与STL","h":"#不带出入度的最简模板","p":243},{"i":300,"t":"#include #include #include #include #define MAX(a, b) ((a) > (b) ? 
(a) : (b) ) using namespace std; int n,m; vector inDegreelist,outDegreelist; //定义图的定点 typedef struct Vertex { int id,inDegree,outDegree; vector connectors; //存储节点的后续连接顶点编号 Vertex() : id(-1),inDegree(0),outDegree(0) {} Vertex(int nid) : id(nid),inDegree(0),outDegree(0) {} } Vertex; //定义Graph的邻接表表示 typedef struct Graph { vector vertexs; //存储定点信息 int nVertexs; //计数:邻接数 bool isDAG; //标志:是有向图吗 Graph(int n, bool isDAG) : nVertexs(n), isDAG(isDAG) { vertexs.resize(n); } Graph() : nVertexs(1), isDAG(1) { vertexs.resize(1); } //向图中添加边 bool addEdge(int id1, int id2) { if (!(MAX(id1, id2) < vertexs.size())) return false; if (isDAG) { vertexs[id1].connectors.push_back(id2); vertexs[id1].outDegree++; vertexs[id2].inDegree++; } else { vertexs[id1].connectors.push_back(id2); vertexs[id2].connectors.push_back(id1); vertexs[id1].outDegree++; vertexs[id1].inDegree++; vertexs[id2].outDegree++; vertexs[id2].inDegree++; } return true; } } Graph; Graph g; void init(){ cin>>n>>m; g=Graph(n, true); int src,dst; while(m--){ cin>>src>>dst; g.addEdge(src,dst); } vector::iterator it = g.vertexs.begin(); while(it!=g.vertexs.end()){ inDegreelist.push_back(it->inDegree); outDegreelist.push_back(it->outDegree); it++; } } int countin(int n){ return count(inDegreelist.begin(),inDegreelist.end(),n); } int countout(int n){ return count(outDegreelist.begin(),outDegreelist.end(),n); } bool Is_List(){ //有一个inDegree为0的头和一个outDegree为0的尾,且其余节点入度与出度都为1; return (countin(0)==1)&&(countout(0)==1)&&(countin(1)==n-1)&&(countout(1)==n-1); } bool Is_Tree(){ //有一个inDegree为0的头且其余节点inDegree均为1,且不是链表; return (countin(0)==1)&&(countin(1)==n-1); } bool topologicalSort(){//拓扑排序判断有环无环 int num=0;//记录加入拓扑排序的顶点数 queue q; for(int i=0;i #include #ifndef BASE #define BASE #define TRUE 1 #define FALSE 0 #define OK 1 #define ERROR 0 #define INFEASIBLE -1 #define OVERFLOW -2 typedef int Status; typedef int bool; #endif #define VertexType char //点类型 #define VRType int //边类型 #define maxSize 100 void Visit(VertexType e) { printf(\"%c\", e); } #define MAX_VERTEX_NUM 20 typedef enum{DG, UDG} GraphKind; typedef struct ArcNode{ int adjV; //边指向的顶点 VRType weight; //权重 struct ArcNode *next; }ArcNode; //边 typedef struct VNode{ VertexType data; ArcNode *firstarc; }VNode, AdjList[MAX_VERTEX_NUM]; //顶点 typedef struct{ GraphKind kind; int vernum,arcnum; AdjList vers; }ALGraph; /*------------------------ |7.14 创建有向图的邻接表| ------------------------*/ Status InitGraph_AL(ALGraph *pG) { //初始化 int i; pG->arcnum = 0; pG->vernum = 0; for (i=0; ivers[i].firstarc = NULL; //VC++6.0中指针初始化为0xcccccccc return OK; } int LocateVex_AL(ALGraph G, VertexType e) { //定位值为e的元素下标 int i; for (i=0; i弧的数目->各顶点的信息->各条弧的信息 int i,a,b; char tmp[MAX_VERTEX_NUM]; char h,t; ArcNode *p, *q; InitGraph_AL(pG); //VC++6.0中指针初始化为0xcccccccc,如果不将指针初始化为NULL,会出错 //图的类型 pG->kind = DG; //顶点数目 scanf(\"%d\", &i); if (i<0) return ERROR; pG->vernum = i; //弧的数目 scanf(\"%d\", &i); if (i<0) return ERROR; pG->arcnum = i; //各顶点信息 scanf(\"%s\", tmp); for (i=0; ivernum; ++i) pG->vers[i].data=tmp[i]; //弧的信息 for (i=0; iarcnum; ++i) { scanf(\"%s\", tmp); h = tmp[0]; t = tmp[2]; a = LocateVex_AL(*pG, h); b = LocateVex_AL(*pG, t); if (a<0 || b<0) return ERROR; p = (ArcNode *)malloc(sizeof(ArcNode)); if (!p) exit(OVERFLOW); p->adjV=b;p->next=NULL; if (pG->vers[a].firstarc) { //已经有边了 for (q = pG->vers[a].firstarc; q->next; q=q->next) ; //找到最后一条 q->next = p; } else { //第一条边 pG->vers[a].firstarc = p; } } return OK; } /*---------------------------------------------------------------- |7.28 有向图-从u-v的所有简单路径 | 
----------------------------------------------------------------*/ int visit[MAX_VERTEX_NUM]; //前面定义了 VertexType paths[maxSize][MAX_VERTEX_NUM]; //存放路径 int path[MAX_VERTEX_NUM]; //路径 int pathnum=0; //当前是第几条路径 void FindAllPath(ALGraph G, int u,int v,int k) { //u->v当前是第k个位置 int i; ArcNode *p; visit[u]=1; //走到了u path[k]=u; //添加到路径->下标位置为k的结点是u(第k+1个是u) if (u==v) { //找到了 for (i=0; i<=k; i++) {//复制到paths paths[pathnum][i] = G.vers[path[i]].data; } paths[pathnum][i]='\\0'; //结束符 pathnum++; //找下一条路径 } else { //u的邻边开始找 for (p=G.vers[u].firstarc; p; p=p->next) { if (visit[p->adjV]==0) FindAllPath(G, p->adjV, v, k+1); //去这个邻接点找 } } // 回溯到上一个结点 // 注意:回溯应该写在外面-->也就是不管有没有找到都要回溯 visit[u]=0; path[k]=0; } int main() { /*7.28 6 11 ABCDEF B,A B,D C,B C,F D,C D,E D,F E,A F,A F,B F,E B->A A->B D->A */ int i,j; int cnt; ALGraph G; char tmp[20]; CreateDG_AL(&G); while (1) { scanf(\"%s\", tmp); //A->B i = LocateVex_AL(G, tmp[0]); j = LocateVex_AL(G, tmp[3]); for (cnt=0; cnt #include #define mem(a,b) memset(a,b,sizeof a); using namespace std; typedef long long ll; const int maxn=50; int mid[maxn],po[maxn],pr[maxn]; int first; struct node { int l,r; }T[maxn]; // 中序+先序=>二叉树 int mid_pr_build(int la,int ra,int lb,int rb) // la,ra:表示中序遍历 lb,rb:表示先序遍历 { // 这里不能等于,因为假设:len==1,则la==ra,直接返回,但是实际上是有一个 rt 的,却没被建立 if(la>ra) return 0; int rt=pr[lb]; // 因为先序遍历第一个是根节点 int p1=la,p2; while(mid[p1]!=rt) p1++; // 在中序遍历中找到根节点 p2=p1-la; T[rt].l=mid_pr_build(la,p1-1,lb+1,lb+p2); // 左子树(锁定左子树范围的下标) T[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb); // 右子树(锁定右子树范围的下标) return rt; } // 中序+后序=>二叉树 int mid_po_build(int la,int ra,int lb,int rb) // la,ra:表示中序遍历 lb,rb:表示后序遍历 { if(la>ra) return 0; int rt=po[rb]; // 因为后序遍历最后一个是根节点 int p1=la,p2; while(mid[p1]!=rt) p1++; // 在中序遍历中找到根节点 p2=p1-la; T[rt].l=mid_po_build(la,p1-1,lb,lb+p2-1); // 左子树(锁定左子树范围的下标) T[rt].r=mid_po_build(p1+1,ra,lb+p2,rb-1); // 右子树(锁定右子树范围的下标) return rt; } // 求树高 int getHeight(int rt) { if(rt==0) return 0; return 1+max(getHeight(T[rt].l),getHeight(T[rt].r)); } // 层序遍历 void bfs(int rt) { queue q; vector v; q.push(rt); while(!q.empty()) { int w=q.front(); q.pop(); v.push_back(w); if(T[w].l!=0) q.push(T[w].l); if(T[w].r!=0) q.push(T[w].r); } int len=v.size(); for(int i=0;i #include #define mem(a,b) memset(a,b,sizeof a); using namespace std; typedef long long ll; const int maxn=50; int mid[maxn],po[maxn],pr[maxn]; int first; struct node { int l,r; }T[maxn]; int mid_pr_build(int la,int ra,int lb,int rb) { if(la>ra) return 0; int rt=pr[lb]; int p1=la,p2; while(mid[p1]!=rt) p1++; p2=p1-la; T[rt].l=mid_pr_build(la,p1-1,lb+1,lb+p2); T[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb); return rt; } int mid_po_build(int la,int ra,int lb,int rb) { if(la>ra) return 0; int rt=po[rb]; int p1=la,p2; while(mid[p1]!=rt) p1++; p2=p1-la; T[rt].l=mid_po_build(la,p1-1,lb,lb+p2-1); T[rt].r=mid_po_build(p1+1,ra,lb+p2,rb-1); return rt; } int getHeight(int rt) { if(rt==0) return 0; return 1+max(getHeight(T[rt].l),getHeight(T[rt].r)); } void bfs(int rt) { queue q; vector v; q.push(rt); while(!q.empty()) { int w=q.front(); q.pop(); v.push_back(w); if(T[w].l!=0) q.push(T[w].l); if(T[w].r!=0) q.push(T[w].r); } int len=v.size(); for(int i=0;i #include #define mem(a,b) memset(a,b,sizeof a) #define ssclr(ss) ss.clear(), ss.str(\"\") #define INF 0x3f3f3f3f #define MOD 1000000007 using namespace std; typedef long long ll; const int maxn=5e4+1000; int f; int pre[maxn], in[maxn]; struct node { int l,r,d; }T[maxn]; int create(int l1,int r1,int l2,int r2) // in pre { if(l2>r2) return -1; int rt=l2; int p1=l1,p2; 
while(in[p1]!=pre[rt]) p1++; p2=p1-l1; T[rt].d=pre[rt]; T[rt].l=create(l1,p1-1,l2+1,l2+p2); T[rt].r=create(p1+1,r1,l2+p2+1,r2); return rt; } void postT(int rt) { if(rt==-1 || !f) return; postT(T[rt].l); postT(T[rt].r); if(f) f=0, printf(\"%d\\n\",T[rt].d); } int main() { int n; scanf(\"%d\",&n); for(int i=0;i 0是手动调节的。我们把这个损失函数叫做L1。请注意,除了w = 0时,| w | 在任何地方都是可微的,如下所示。我们以后会需要这个。 d∣w∣dw={1w>0−1w<0\\frac{d|w|}{d w}=\\left\\{\\begin{array}{ll} 1 & w>0 \\\\ -1 & w<0 \\end{array}\\right.dwd∣w∣​={1−1​w>0w<0​ 具有L2正则化的损失函数​ 同样,将L2正则化项添加到L看起来是这样的: L2=(wx+b−y)2+λw2L_{2}=(w x+b-y)^{2}+\\lambda w^{2}L2​=(wx+b−y)2+λw2 同样,λ> 0。 梯度下降​ 现在,让我们根据上面定义的3个损失函数,使用梯度下降优化来求解线性回归模型。回想一下,更新梯度下降中的参数w如下: wnew =w−η∂L∂w\\begin{aligned}w_{\\text {new }}=w-\\eta \\frac{\\partial L}{\\partial w}\\end{aligned}wnew ​=w−η∂w∂L​​ x 1import torch.nn as nn2​3# 使用NLLLoss实现4nllloss = nn.NLLLoss()5predict = torch.Tensor([[2, 3, 1], [3, 7, 9]])6predict = torch.log(torch.softmax(predict, dim=-1))7label = torch.tensor([1, 2])8nllloss(predict, label)9# output: tensor(0.2684)10​11# 使用CrossEntropyLoss实现12cross_loss = nn.CrossEntropyLoss()13predict = torch.Tensor([[2, 3, 1], [3, 7, 9]])14label = torch.tensor([1, 2])15cross_loss(predict, label)16# output: tensor(0.2684)python L:L:L: wnew =w−η∂L∂w=w−η⋅[2x(wx+b−y)]\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L}{\\partial w} \\\\ &=w-\\eta \\cdot[2 x(w x+b-y)] \\end{aligned}wnew ​​=w−η∂w∂L​=w−η⋅[2x(wx+b−y)]​ L1:L1:L1: wnew =w−η∂L1∂w=w−η⋅[2x(wx+b−y)+λd∣w∣dw]={w−η⋅[2x(wx+b−y)+λ]w>0w−η⋅[2x(wx+b−y)−λ]w<0\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L_{1}}{\\partial w} \\\\ &=w-\\eta \\cdot\\left[2 x(w x+b-y)+\\lambda \\frac{d|w|}{d w}\\right] \\\\ &=\\left\\{\\begin{aligned} w-\\eta \\cdot[2 x(w x+b-y)+\\lambda] & w>0 \\\\ w-\\eta \\cdot[2 x(w x+b-y)-\\lambda] & w<0 \\end{aligned}\\right. 
\\end{aligned}wnew ​​=w−η∂w∂L1​​=w−η⋅[2x(wx+b−y)+λdwd∣w∣​]={w−η⋅[2x(wx+b−y)+λ]w−η⋅[2x(wx+b−y)−λ]​w>0w<0​​ L2:L2:L2: wnew =w−η∂L2∂w=w−η⋅[2x(wx+b−y)+2λw]\\begin{aligned} w_{\\text {new }} &=w-\\eta \\frac{\\partial L_{2}}{\\partial w} \\\\ &=w-\\eta \\cdot[2 x(w x+b-y)+2 \\lambda w] \\end{aligned}wnew ​​=w−η∂w∂L2​​=w−η⋅[2x(wx+b−y)+2λw]​","s":"损失函数","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#损失函数","p":348},{"i":357,"t":"从这里开始,让我们对上面的方程式进行以下替换 (以获得更好的可读性): η=1\\eta=1η=1 H=2x(wx+b−y)H=2 x(w x+b-y)H=2x(wx+b−y) 接着我们就可以得到: L:L:L: wnew =w−Hw_{\\text {new }}=w-Hwnew ​=w−H L1: wnew ={(w−H)−λ,w>0(w−H)+λ,w<0w_{\\text {new }}=\\left\\{\\begin{array}{ll} (w-H)-\\lambda, & w>0 \\\\ (w-H)+\\lambda, & w<0 \\end{array}\\right.wnew ​={(w−H)−λ,(w−H)+λ,​w>0w<0​ L2:L2:L2: wnew =(w−H)−2λww_{\\text {new }}=(w-H)-2 \\lambda wwnew ​=(w−H)−2λw","s":"如何避免过拟合","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#如何避免过拟合","p":348},{"i":359,"t":"观察有正则化参数 λ 和没有正则化参数 λ 的权重更新之间的差异。这里有一些地方可以很直观的看出。 Intuition A: 假设用等式0,计算w-H给我们一个w值,导致过拟合。然后,直觉上,公式将减少过拟合的机会,因为引入 λ 使我们远离了前面说过的由于w导致的过拟合问题。 Intuition B: 一个过度拟合的模型意味着我们有一个非常适合我们模型的w值。“完美” 的意思是,如果我们将数据 (x) 替换回模型中,我们的预测将非常非常接近真实的y。当然,这很好,但是我们不想要完美。为什么?因为这意味着我们的模型仅适用于我们训练的数据集。这意味着我们的模型将产生与其他数据集的真实值相去甚远的预测。因此,我们满足于不那么完美,希望我们的模型也能与其他数据进行接近的预测。为此,我们用惩罚项 λ 在等式0中 “taint” 这个完美的w。就如公式15和16所示。 Intution C: 请注意,H 取决于模型 (w和b) 和数据 (x和y)。仅根据公式中的模型和数据更新权重会导致过拟合,从而导致模型泛化性不好。另一方面,在等式15,16中,w的最终值不仅受模型和数据的影响,而且还受与模型和数据无关的预定义参数 λ 的影响。因此,尽管值过大会导致模型严重欠拟合,如果我们设置适当的 λ 值就可以防止过拟合。 Intution D: 不同潜在训练集的权重会更相似——这意味着模型的方差减少了(相反,如果我们每次随机移动权重只是为了摆脱过度拟合的解决方案,方差不会改变)。 我们将为每个功能提供更小的权重。为什么这会减少过度拟合?我觉得很容易思考的方式是,在典型情况下,我们将有少量简单的特征,这些特征将解释大部分方差 (例如,y的大部分将由y_hat = ax+b解释); 但是如果我们的模型没有正则化,我们可以添加我们想要的更多功能来解释数据集的残差方差 (例如y_at = ax+bx ²+ cx ³ + e),这自然会使得模型过度拟合训练。引入权重之和的惩罚意味着模型必须最佳地 “分配” 其权重,因此自然地,该 “资源” 的大部分将用于解释大部分方差的简单特征,而复杂特征的权重很小或为零。","s":"有正则化与没有正则化","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#有正则化与没有正则化","p":348},{"i":361,"t":"比较上面每个等式的第二项。除H外,w的变化取决于 ± λ 项或-2λw项,这突出了以下内容的影响: sign of current w (L1, L2) magnitude of current w (L2) doubling of the regularisation parameter (L2) 虽然使用L1的权重更新会受到第一点的影响,但来自L2的权重更新受所有这三个点的影响。虽然我只是根据迭代方程更新进行了比较,但请注意,这并不意味着一个比另一个 “更好”。 现在,让我们在下面看看如何仅通过当前w的符号就可以实现L1的正则化效应。","s":"L1 vs L2","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1-vs-l2","p":348},{"i":363,"t":"看看方程3.1中的L1。如果w为正,则正则化参数 λ>0将通过从w中减去 λ 来让w更小。相反,在等式3.2中,如果w为负,则 λ 将被加到w上,从而使其较少为负。因此,这具有将w推向0的效果。 这在1元线性回归模型中当然毫无意义,但其具有在多元回归模型中 “去除” 无用变量的能力。你也可以认为L1完全减少了模型中的特征数量。以下是L1试图在多元线性回归模型中 “推” 一些变量的示例: y^=0.4561x1−0.0007x2+0.3251x3+0.0009x4+0.0001x5−0.9142x6−0.553\\hat{y}=0.4561 x_{1}-0.0007 x_{2}+0.3251 x_{3}+0.0009 x_{4}+0.0001 x_{5}-0.9142 x_{6}-0.553y^​=0.4561x1​−0.0007x2​+0.3251x3​+0.0009x4​+0.0001x5​−0.9142x6​−0.553 那么,将w推向0如何有助于L1正则化中的过拟合?如上所述,随着w变为0,我们正在通过降低变量的重要性来减少功能的数量。在上面的方程式中,我们看到x_2,x_4和x_5由于系数小而几乎 “无用”,因此我们可以将它们从方程式中删除。这反过来降低了模型的复杂性,使我们的模型更简单。更简单的模型可以减少过拟合的机会。 Note: 虽然L1具有将权重推向0的影响,而L2没有,但这并不意味着由于L2的权重不能达到或者接近0。","s":"L1的稀疏性","u":"/docs/Deep Learning/基础知识/对于正则化的理解","h":"#l1的稀疏性","p":348},{"i":366,"t":"Logistic Regression直译为逻辑回归,是一种用来解决二分类问题的机器学习方法,用于估计某种事物的可能性。 逻辑回归经过sigmoid函数输出的结果可将其视为probability,而后根据设定的置信度阈值来判断该特征向量对应的标签是1还是0,用以解决二分类问题。","s":"一、什么是Logistic Regression","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#一什么是logistic-regression","p":364},{"i":368,"t":"线性回归要求因变量是连续性数值变量,而逻辑回归要求因变量是离散的变量。 逻辑回归以线性回归为理论支持,通过Sigmoid函数引入了非线性因素。 线性回归常用MSE函数作为损失函数,而逻辑回归作为分类任务的解决方案通常搭配交叉熵损失函数进行训练。","s":"二、逻辑回归(Logistic Regression)和线性回归(Linear Regression)","u":"/docs/Deep Learning/基础知识/Logistic 
Regression","h":"#二逻辑回归logistic-regression和线性回归linear-regression","p":364},{"i":370,"t":"从历史角度方面看,逻辑回归在诞生时使用MSE作为损失函数,其目标是让输出的概率更接近于1,与回归任务的目标相似。","s":"三、逻辑回归到底是回归任务(Regression)还是分类任务(Classification)?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#三逻辑回归到底是回归任务regression还是分类任务classification","p":364},{"i":372,"t":"逻辑回归以及其他分类任务在测试角度上的目标让提高分类准确率acc,但并不会将maximize accuracy作为数学上的训练方法,即在训练过程中不使用与acc有关的损失函数。 逻辑回归中的训练目标(评估函数)与预测目标(评估函数)并不相同,但方向一致。 acc=∑I(predi==yi)len(Y)(1)acc = \\frac{\\sum{I(pred_i==y_i)}}{len(Y)} \\tag{1}acc=len(Y)∑I(predi​==yi​)​(1) 如果在训练过程中以最大化acc为目标,当参数在训练过程中向标签方向更新使得逻辑回归输出的正确类的概率增大时,考虑以下两种情况: gradient = 0 if accuracy unchanged but weights changed: 由于阈值的存在,下一轮迭代输出的概率可能仍小于阈值,从而导致分类结果与上一次迭代相同,此时acc并无变化,出现梯度为0的情况。 gradient not continuous since the number of correct is not continunous: 当上一轮迭代的输出概率很接近阈值时,下一次迭代的概率提升了很少一点但是仍超过了阈值,且一个batch中有大量样本均存在这种情况,此时acc有显著提升而网络的权重的更新极小,此时,与acc有关的Loss函数对权重求导得到的梯度会出现梯度爆炸或者说不连续的情况。","s":"四、为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数?","u":"/docs/Deep Learning/基础知识/Logistic Regression","h":"#四为什么逻辑回归或其他分类任务不使用分类准确率作为损失函数","p":364},{"i":374,"t":"提示 正则化与权重衰退","s":"正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"","p":373},{"i":376,"t":"正则化(Regularization)是机器学习中用于控制模型过拟合的一种技术。在模型训练过程中,我们通常要最小化一个损失函数来得到最佳的模型参数。但是当模型过于复杂时,容易出现过拟合现象,即在训练数据上表现很好,但在测试数据上表现很差。这是因为模型过于依赖训练数据的噪声和细节,而忽略了真正的规律。 正则化通过在损失函数中增加一个惩罚项(Penalty)来对模型进行约束,防止其过分依赖训练数据。 常见的正则化方法包括L1正则化(硬性限制)、L2正则化(柔性限制)等。 L1正则化会使得一部分参数变为0,从而实现特征选择的效果;L2正则化则会使得模型参数尽量接近0,也就是使得模型更加平滑。在使用正则化时,需要调整正则化强度的超参数,以达到最优的泛化性能。","s":"一、什么是正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#一什么是正则化","p":373},{"i":378,"t":"min l(w,b) subject to ∥w∥12≤θ(1)min \\space l(w, b) \\space \\text{subject to} \\space \\Vert w \\Vert^2_1 \\leq \\theta \\tag{1}min l(w,b) subject to ∥w∥12​≤θ(1) L1正则化限制权重参数的L1范数小于某一特定的超参数 通常不限制偏移bbb 更小的超参数θ\\thetaθ意味着更强的正则项","s":"二、L1正则化","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#二l1正则化","p":373},{"i":380,"t":"L2正则化是指在模型的损失函数中,加入对模型参数的L2范数进行惩罚的一种方法。公式如下所示: l(w,b)+λ2∥w∥12(2)l(w, b) + \\frac{\\lambda}{2} \\Vert w \\Vert^2_1 \\tag{2}l(w,b)+2λ​∥w∥12​(2) 其中,λ\\lambdaλ是一个正则化系数超参数 此时在更新梯度时,具有如下公式 ∂∂w(l(w,b)+λ2∥w∥12)=∂l(w,b)∂w+λw(3)\\frac{\\partial}{\\partial w} \\big(l(w, b) + \\frac{\\lambda}{2} \\Vert w \\Vert^2_1 \\big) = \\frac{\\partial l(w, b)}{\\partial w} + \\lambda w \\tag{3}∂w∂​(l(w,b)+2λ​∥w∥12​)=∂w∂l(w,b)​+λw(3) wt+1=(1−ηλ)wt+η∂l(wt,bt)∂wt(4)w_{t+1}=(1-\\eta \\lambda)w_t + \\eta \\frac{\\partial l(w_t, b_t)}{\\partial w_t} \\tag{4}wt+1​=(1−ηλ)wt​+η∂wt​∂l(wt​,bt​)​(4) 通常ηλ<1\\eta \\lambda < 1ηλ<1,因此又叫做权重衰退","s":"三、L2正则化与权重衰退","u":"/docs/Deep Learning/基础知识/正则化与权重衰退","h":"#三l2正则化与权重衰退","p":373},{"i":383,"t":"AlexNet是指2012年由Alex Krizhevsky、Ilya Sutskever和Geoffrey Hinton提出的一种卷积神经网络模型,它主要应用于图像分类任务。在当时,AlexNet的表现远远超过了其他参赛的网络模型,并且在ImageNet比赛中获得了第一名。 标志着新的一轮神经网络热潮的开始","s":"背景","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#背景","p":381},{"i":385,"t":"ReLU激活函数 Dropout正则化、丢弃法 最大池化MaxPooling","s":"新的概念和技术","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#新的概念和技术","p":381},{"i":387,"t":"由于输入的图片更大,设置了更大的卷积核尺寸和步长 更大的池化窗口,使用最大池化 在卷积层中设置了更大的输出通道,提取更深层的特征、识别更多的模式 激活函数从Sigmoid改成了ReLU,减缓梯度消失 在卷积层和输出层之间仍使用两个全连接隐藏层,但在输出层之前增加了Dropout层做正则化 使用了数据增强data augmentation","s":"与LeNet比较","u":"/docs/Deep Learning/经典模型/AlexNet","h":"#与lenet比较","p":381},{"i":390,"t":"LeNet是由Yann LeCun等人于1998年提出的卷积神经网络结构,该结构由卷积层、池化层和全连接层组成,可以高效地处理手写数字图像,并在MNIST数据集上取得了很好的性能。 LeNet-5的成功标志着卷积神经网络在计算机视觉领域中的崛起,并促进了深度学习的快速发展。","s":"背景","u":"/docs/Deep Learning/经典模型/LeNet","h":"#背景","p":388},{"i":392,"t":"import torch import numpy as np from torch import nn 
as nn from torch.nn import functional as F from d2l import torch as d2l from matplotlib import pyplot as plt import os os.environ['http_proxy'] = 'http://127.0.0.1:7890' os.environ['https_proxy'] = 'https://127.0.0.1:7890' class LeNetReshape(nn.Module): def __init__(self): super(LeNetReshape, self).__init__() def forward(self, x): return x.reshape(-1, 1, 28, 28) class LeNet5(nn.Module): def __init__(self): super(LeNet5, self).__init__() self.net = torch.nn.Sequential( LeNetReshape(), # 激活函数应为Sigmoid nn.Conv2d(1, 6, kernel_size=5, padding=2), nn.LeakyReLU(), nn.AvgPool2d(kernel_size=2, stride=2), nn.Conv2d(6, 16, kernel_size=5), nn.LeakyReLU(), nn.AvgPool2d(kernel_size=2, stride=2), nn.Flatten(), nn.Linear(16 * 5 * 5, 120), nn.LeakyReLU(), nn.Linear(120, 84), nn.Sigmoid(), nn.Linear(84, 10)) def forward(self, x): return self.net(x) def evaluate_accuracy_gpu(net, data_iter, device=None): if isinstance(net, torch.nn.Module): net.eval() if not device: device = next(iter(net.parameters())).device metric = d2l.Accumulator(2) for X, y in data_iter: if isinstance(X, list): X = [x.to(device) for x in X] else: X = X.to(device) y = y.to(device) metric.add(d2l.accuracy(net(X), y), y.numel()) # 此处accuracy是统计 return metric[0] / metric[1] def accuracy(y_hat, y): return torch.sum(y_hat.argmax(dim=1) == y) def train(net, train_iter, test_iter, num_epochs, lr, device): def init_weights(m): if type(m) == nn.Linear or type(m) == nn.Conv2d: nn.init.xavier_uniform_(m.weight) net.apply(init_weights) net.to(device) optimizer = torch.optim.SGD(net.parameters(), lr=lr) loss = torch.nn.CrossEntropyLoss() loss.to(device) animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], legend=['train loss', 'train acc', 'test acc']) timer, num_batches = d2l.Timer(), len(train_iter) metric = d2l.Accumulator(3) net.train() for epoch in range(num_epochs): for batch, (X, y) in enumerate(train_iter): timer.start() optimizer.zero_grad() X, y = X.to(device), y.to(device) y_hat = net(X) l = loss(y_hat, y) l.backward() optimizer.step() metric.add(l * X.shape[0], accuracy(y_hat, y), y.numel()) timer.stop() train_l = metric[0] / metric[2] train_acc = metric[1] / metric[2] if (batch + 1) % (num_batches // 5) == 0 or batch == num_batches - 1: animator.add(epoch + (batch + 1) / num_batches, (train_l, train_acc, None)) test_acc = evaluate_accuracy_gpu(net, test_iter) animator.add(epoch + 1, (None, None, test_acc)) print(f'loss {train_l:.3f}, train acc {train_acc:.3f}, test acc {test_acc:.3f}') print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on {str(device)}') plt.show() batch_size = 256 train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size) lr, num_epochs = 0.9, 10 lenet = LeNet5() train(lenet, train_iter, test_iter, num_epochs, lr, d2l.try_gpu())","s":"代码实现","u":"/docs/Deep Learning/经典模型/LeNet","h":"#代码实现","p":388},{"i":394,"t":"在分类模型中,最后两个全连接层之间不要使用ReLU激活函数。因为ReLU的范围是[0, +∞),它会将所有负数都变成0。而最后一层全连接层输出了类别信息,倒数第二层的输出值包含着非常重要的类别信息,此时使用激活函数很可能会导致信息丢失。","s":"问题","u":"/docs/Deep Learning/经典模型/LeNet","h":"#问题","p":388},{"i":398,"t":"感知机是一种二元线性分类模型,旨在寻找一个超平面(在二维空间中即为一条直线),将不同类别的实例划分到不同的区域。感知机的训练过程包括迭代地对样本进行分类,并根据分类错误的情况调整超平面的参数,使得分类准确率逐步提高。感知机是基础的机器学习算法之一,其思想和方法对神经网络等更复杂的模型也具有启发意义。","s":"一、什么是感知机","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#一什么是感知机","p":396},{"i":400,"t":"输入向量:感知机的输入向量是一个n维向量x=(x1,x2,...,xn)x=(x_1,x_2,...,x_n)x=(x1​,x2​,...,xn​),表示一个样本的各个特征值。 权值向量:感知机的权值向量也是一个n维向量w=(w1,w2,...,wn)w=(w_1,w_2,...,w_n)w=(w1​,w2​,...,wn​),表示每个特征对应的权重。 偏置项:偏置项bbb是一个常数,可看作是模型的截距,用于调整阈值函数的位置。 
内积运算:感知机将输入向量和权值向量进行内积运算,并加上偏置项,得到输入信号z=w∗x+bz=w*x+bz=w∗x+b。 阈值函数:将输入信号zzz带入阈值函数,如符号函数sign(z)sign(z)sign(z),即可得到分类结果。 损失函数:感知机使用误分类点到超平面的距离来作为损失函数,即 L(y,z)=max(0,−y∗z)(1)L(y,z)=max(0,-y*z) \\tag{1}L(y,z)=max(0,−y∗z)(1) 其中yyy是样本的真实标签,zzz是预测值。 参数更新:根据当前样本误分类情况来对权值向量www和偏置项bbb进行迭代更新。 收敛条件:当全部训练样本被正确分类或达到最大迭代次数时,感知机算法停止迭代。 感知机训练流程伪代码如下所示: initialize w = 0 and b = 0 repeat if yi * zi <= 0 then w = w + yi * xi and b = b + yi end if until all classified correctly","s":"二、详细原理","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#二详细原理","p":396},{"i":402,"t":"感知机是一个二分类模型,最早的AI模型之一 求解算法等价于使用批量大小为1的梯度下降 要求数据集线性可分,不能拟合XOR异或等非线性问题,导致第一次AI寒冬","s":"三、总结","u":"/docs/Deep Learning/经典模型/Perceptron","h":"#三总结","p":396},{"i":405,"t":"concat与stack函数 stack函数对输入的两个张量在指定的维度进行堆叠,是创建了新的维度 concat函数对输入的张量在指定维度进行拼接,没有创建新的维度 # stack和concat函数 a = torch.rand(4, 3) # A班4位同学,每位同学3科成绩 b = torch.rand(4, 3) # B班4位同学,每位同学3科成绩 c = torch.stack((a, b), dim=0) # 理解:年级所有同学的3科成绩(假设年级只有A班和B班两个班,每个班只有四名同学) print(c.shape) # torch.Size([2, 4, 3]) d = torch.concat((a, b), dim=1) # 理解:a是A班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6科成绩 print(d.shape) # torch.Size([4, 6]) list和tensor乘法不同之处 list的*乘法是复制元素,改变list的shape tensor的*乘法是对tensor中的元素进行点乘计算 a = torch.tensor([[3, 3, 3, 3]]) b = [3] # list的*乘是复制元素进行扩展 print(a * 3) # tensor([[9, 9, 9, 9]]) print(b * 3) # [3, 3, 3] 最大值 / 最小值索引:argmax / argmin 需要通过参数dim指定操作的维度,dim的理解 官方解释:The dimension to reduce 以二维张量举例,dim=1即在每一行中选出一个最大值 / 最小值元素的索引,索引的shape应为[dim0, 1],即reduce了dim=1的维度 # 最大值最小值索引 a = torch.tensor([[0.1, 0.9, 0.3], [0.9, 0.8, 0.99], [0.1, 0.7, 0.8], [0.88, 0.1, 0.2]]) # [4, 3] print(\"argmax output: \", a.argmax(dim=0), a.argmax(dim=1)) # argmax output: tensor([1, 0, 1]) tensor([1, 2, 2, 0]) Python zip函数 zip函数可以理解为压缩,将输入的两个迭代器的最外层对应元素压缩为一个新的元素 a = torch.tensor([1, 2, 3]) b = torch.tensor([4, 5, 6]) c = zip(a, b) for i in c: print(i) ''' (tensor(1), tensor(4)) (tensor(2), tensor(5)) (tensor(3), tensor(6)) ''' a = torch.tensor([[1, 2, 3], [3, 2, 1]]) b = torch.tensor([[4, 5, 6], [6, 5, 4]]) c = zip(a, b) for i in c: print(i) ''' (tensor([1, 2, 3]), tensor([4, 5, 6])) (tensor([3, 2, 1]), tensor([6, 5, 4])) '''","s":"一、常用函数部分","u":"/docs/Deep Learning/基础知识/PyTroch基础","h":"#一常用函数部分","p":403},{"i":409,"t":"CNN模型的输入向量的形状是固定的,其输出向量的形状也是固定的或可以根据不同的下游任务而唯一确定,即输入形状与下游任务共同确定了一个CNN模型的架构,具有较强的固定性。 信息 在视觉中,输入大多为数字图像,其形状可以大致分为由尺寸和通道数来决定。 从输入图像的尺寸看,当CNN中没有全连接层时,本质上可以接受任意尺寸的输入,但这是狭隘的。若考虑其下游任务以及输出,如FCN(Fully Convolution Network),FCN通过最后通过反卷积将tensor还原到原始图像尺寸,即在CNN中,输入与输出(下游任务的要求)都影响着CNN网络的结构。 从通道数看,CNN本质上可以接受任意通道数的图像输入,但是其模型效果将会受到极大的影响。以一个使用通道数为3的数据集进行训练的CNN模型,但在测试阶段分别使用通道数为 1 和 6 的数据进行推理的情形为例,进行分析: 通道数为1的测试集: 情况: 如果使用通道数为 1 的数据进行推理,即灰度图像,而模型在训练时是使用 RGB 数据集训练的,模型可能会受到一些影响。 解释: 模型可能在训练时学到了关于颜色的特定信息,而在测试时,如果输入是灰度图像,那些颜色信息将不可用。 建议: 在这种情况下,模型可能会失去对颜色信息的敏感性,可能需要进行进一步的调整或微调,以适应灰度图像的特性。 通道数为6的测试集: 情况: 如果使用通道数为 6 的数据进行推理,模型可能会面临额外的挑战,因为它在训练时只见过 3 个通道的数据。 解释: 模型在训练时学到的权重是基于 3 个通道的数据的,对于额外的通道,模型可能无法有效利用这些信息。 建议: 对于通道数不匹配的情况,可以考虑进行通道的适当组合或调整。这可能包括降低通道数(例如,只使用前 3 个通道),或者通过某种方式将 6 个通道映射到 3 个通道,例如通过某种特定的数据预处理。 当模型的输入更复杂(sophisticated),是长度不定的向量序列(sequence)时,CNN不能很好地处理,且不能解决输出由输入和模型自行决定的下游任务,如生成类任务。","s":"输入与输出的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#输入与输出的局限性","p":406},{"i":411,"t":"在CNN中引入了局部连接和权值共享的归纳偏置: 局部连接:CNN使用卷积层通过滑动卷积核在输入上进行局部感受野的操作。每个神经元只与输入的一小部分区域相连,这意味着每个神经元只能接触到局部的上下文信息。这样的设计使得CNN更适用于处理图像等数据,其中局部结构通常很重要。 权值共享: CNN的参数共享使得模型能够学习到图像中的局部特征,这也是一种对于上下文的假设。相邻位置上的权重共享使得模型能够对局部结构进行建模,并且这种权重共享使得CNN具有更强的归纳偏置。 CNN的设计理念认为:在图像任务中,局部结构通常更为重要,局部连接和权值共享使得CNN更适用于图像处理等任务。 
但也正是这种设计理念,使得CNN在面临长输入序列时不能很好地综合上下文信息、提取位置信息,因此Self-Attention应运而生,允许每个位置关注到序列中地所有其他位置。这种全局关联性质使得Transformer能够捕捉序列中的长距离依赖关系。","s":"关联上下文的局限性","u":"/docs/Deep Learning/论文笔记/Self-Attention","h":"#关联上下文的局限性","p":406},{"i":413,"t":"提示 欢迎来到笔记本的深度学习部分","s":"Welcome","u":"/docs/Deep Learning/intro","h":"","p":412},{"i":415,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Deep Learning/intro","h":"#支持我","p":412},{"i":417,"t":"提示 对于TensorFlow框架,可以使用TensorBoard实现可视化。 对于PyTorch框架,可以使用Visdom或TensorBoardX实现可视化,本篇主要讲述Visdom。","s":"Visdom可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"","p":416},{"i":419,"t":"pip install visdom","s":"一、安装Visdom","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#一安装visdom","p":416},{"i":422,"t":"首先要通过终端启动Visdom,使用本机端口运行服务器。 以下二者均可。 visdom python -m visdom.server","s":"0. Visdom的启动","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#0-visdom的启动","p":416},{"i":424,"t":"from visdom import Visdom vis = Visdom() # 实例化 # 创建一条曲线,前两个参数分别为y轴数据、x轴数据,win参数是窗口的唯一标识,opt可选字典中可以给出窗口的title和legend vis.line([0.], [0.], win='win_id', opts=dict(title=\"win_title\")) # 在训练过程中的合适位置向初始化的曲线中喂数据 # viz.line([real_y_data], [global_step], win='win_id', update='append') # 查看训练loss vis.line([loss.item()], [epoch], win='win_id', update='append') # 对于非image数据,在传入visdom时仍需要先转化为numpy类型","s":"1. 单窗口单曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#1-单窗口单曲线的可视化","p":416},{"i":426,"t":"from visdom import Visdom vis = Visdom() vis.line([[0., 0.]], [0.], win='win_id', opts=dic(title=\"win_title\", legend=[\"curve_name_1\", \"curve_name_2\"])) # 在训练过程中的合适位置向初始化的曲线中喂数据 viz.line([[y1, y2]], [global_step], win='win_id', update='append')","s":"2. 单窗口多曲线的可视化","u":"/docs/Deep Learning/实用技巧/Visdom可视化","h":"#2-单窗口多曲线的可视化","p":416},{"i":428,"t":"通过编写脚本函数的方式,手动开启代理","s":"终端代理","u":"/docs/Linux/实用工具/终端代理","h":"","p":427},{"i":430,"t":"新建脚本文件terminal_proxy.sh # 开启代理 function proxy_on(){ export ALL_PROXY=socks5://127.0.0.1:7890 export http_proxy=http://127.0.0.1:7890 export https_proxy=https://127.0.0.1:7890 echo -e \"已开启代理\" } # 关闭代理 function proxy_off(){ unset ALL_PROXY unset http_proxy unset https_proxy echo -e \"已关闭代理\" }","s":"一、编写脚本","u":"/docs/Linux/实用工具/终端代理","h":"#一编写脚本","p":427},{"i":432,"t":"fish的配置文件:~/.config/fish/config.fish zsh的配置文件:~/.zshrc bash的配置文件:~/.bashrc 在配置文件末尾添加以下代码 source /path/terminal_proxy.sh","s":"二、关联终端配置文件","u":"/docs/Linux/实用工具/终端代理","h":"#二关联终端配置文件","p":427},{"i":434,"t":"在终端中输入以下命令即可开启代理 proxy_on 在终端中输入以下命令即可关闭代理 proxy_off","s":"三、使用","u":"/docs/Linux/实用工具/终端代理","h":"#三使用","p":427},{"i":437,"t":"dock显示的图标是全局图标,程序启动器的desktop文件位于/usr/share/applications中,全局主题中图标主题的程序logo位于~/.local/share/icons/Mkos-Big-Sur-Night/128x128/apps(deppending on specific situation)中。在logo文件夹中挑选想要的logo,在desktop中的icon位置修改即可 应用更新的时候会同时更新.desktop文件,因此在更换图标是最好直接更换在主题文件中替换icon,而不是更改desktop的icon路径 Finder小组件中application title文字不能垂直居中,可以更换为Window title插件","s":"一、latte-dock","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#一latte-dock","p":435},{"i":439,"t":"Finder栏中Plasmoids左半部分从左至右依次为: kpple menu application title/window titile(if the text of application title can't be centered vertically) global menu 右半部分从左至右依次为: resources monitor (fork) mcOS BS Inline Battery 网络 Control Center(replace the icon with search icon) Control Center(replace the icom with menu icon) Better Inline Clock 安装方法: plasmpkg2 -u xxx.plasmoid","s":"二、Kde 
Plasmoids","u":"/docs/Linux/客制化/如何让你的KDE看起来更像macOS","h":"#二kde-plasmoids","p":435},{"i":442,"t":"未知,可能是由Windows休眠模式导致","s":"一、发生原因","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#一发生原因","p":440},{"i":444,"t":"使用ntfsfix修复ntfs磁盘 安装ntfsfix yay -S ntfsfix 查看问题分区 df -h 修复 sudo ntfsfix /dev/your_partition 重启 reboot","s":"二、解决方案","u":"/docs/Linux/问题解决/双系统挂载Windows磁盘为只读文件","h":"#二解决方案","p":440},{"i":446,"t":"提示 欢迎来到笔记本的Linux部分","s":"Welcome","u":"/docs/Linux/intro","h":"","p":445},{"i":448,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Linux/intro","h":"#支持我","p":445},{"i":450,"t":"告示栏的启用 在docusaurus.config.js的themeConfig中加入以下代码 announcementBar: { id: 'announcementBar-3', content: 'Welcome to my notebook!', isCloseable: false, }, 告示栏的背景个性化 在custom.css中加入以下代码 div[class^='announcementBar_'] { background: repeating-linear-gradient( -35deg, var(--ifm-color-primary-lighter), var(--ifm-color-primary-lighter) 20px, var(--ifm-color-primary-lightest) 10px, var(--ifm-color-primary-lightest) 40px ); font-weight: 700; }","s":"告示栏","u":"/docs/Others/博客搭建/告示栏","h":"","p":449},{"i":452,"t":"提示 保研面试中需要准备的问题,夏令营、预推免均可参考 参考视频:https://www.bilibili.com/video/BV1564y1e7b9/?spm_id_from=333.999.0.0&vd_source=24d8fcf68bc0e2b0003defe0995cf533","s":"要准备的问题","u":"/docs/Others/面试/要准备的问题","h":"","p":451},{"i":454,"t":"中文长、短自我介绍(1min、5min) 英文自我介绍(放在中文里的、全程英文的)","s":"一、自我介绍部分","u":"/docs/Others/面试/要准备的问题","h":"#一自我介绍部分","p":451},{"i":456,"t":"数据库bc范式和第三范式区别 特征值和特征向量的意义以及之间的关系 TCP/IP的工作过程描述 在局域网中TCP/IP协议栈是否冗余 列举各种排序算法以及复杂度 栈和队列的区别 如何用两个栈实现队列 使用双指针把负数移到正数前,要求控制到O(n)复杂度 动态规划 询问什么情况下要使用动态规划? TCP和UDP之间的区别 解释什么是中心极限定理 怎样快速找到数组中第k大的数?","s":"二、专业课面试题","u":"/docs/Others/面试/要准备的问题","h":"#二专业课面试题","p":451},{"i":458,"t":"最近阅读的论文 对于人脑和机器学习的思考","s":"三、自由面试题","u":"/docs/Others/面试/要准备的问题","h":"#三自由面试题","p":451},{"i":460,"t":"提示 
欢迎来到笔记本的杂记部分","s":"Welcome","u":"/docs/Others/intro","h":"","p":459},{"i":462,"t":"如果可以帮到你的话就给个免费的Star吧!","s":"支持我!","u":"/docs/Others/intro","h":"#支持我","p":459}],"index":{"version":"2.3.9","fields":["t"],"fieldVectors":[["t/2",[0,0.427,1,3.185,2,2.227,3,2.227,4,2.227,5,1.83,6,4.108,7,2.227,8,2.35,9,2.227,10,3.322,11,2.35,12,2.227,13,2.35,14,2.703,15,1.413,16,2.958,17,3.985,18,3.065,19,2.958,20,3.185,21,3.322,22,4.119,23,2.488,24,2.227,25,2.227,26,2.35,27,2.227,28,2.35,29,2.124,30,2.35,31,3.482,32,3.482,33,3.482,34,2.35,35,2.35,36,2.124,37,2.227,38,2.35,39,2.227,40,2.124,41,2.35,42,2.35,43,2.35,44,2.35]],["t/4",[0,0.424,1,3.202,2,2.243,3,2.243,4,2.243,5,1.843,6,4.118,7,2.243,8,2.367,9,2.243,10,3.34,11,2.367,12,2.243,13,2.367,14,2.71,15,1.421,16,2.974,17,4,18,3.081,19,2.974,20,3.202,21,3.34,22,4.134,23,2.501,24,2.243,25,2.243,26,2.367,27,2.243,28,2.367,29,2.139,30,2.367,31,3.501,32,3.501,33,3.501,34,2.367,35,2.367,36,2.139,37,2.243,38,2.367,39,2.243,40,2.139,41,2.367,42,2.367,43,2.367,44,2.367]],["t/6",[0,0.43,40,3.769,45,4.17,46,4.81,47,4.17,48,4.444,49,4.81,50,4.444,51,5.944,52,4.81,53,4.81,54,3.247,55,4.444,56,4.444,57,4.444,58,4.81,59,3.951,60,4.81,61,3.769,62,4.81,63,4.81,64,4.444]],["t/9",[0,0.435,65,6.034,66,6.034,67,6.034,68,5.408,69,4.996,70,6.034,71,6.034,72,6.034,73,7.752,74,6.034,75,6.034,76,6.034,77,5.408]],["t/11",[0,0.454]],["t/13",[0,0.419]],["t/15",[0,0.427,1,3.185,2,2.227,3,2.227,4,2.227,5,1.83,6,4.108,7,2.227,8,2.35,9,2.227,10,3.322,11,2.35,12,2.227,13,2.35,14,2.703,15,1.413,16,2.958,17,3.985,18,3.065,19,2.958,20,3.185,21,3.322,22,4.119,23,2.488,24,2.227,25,2.227,26,2.35,27,2.227,28,2.35,29,2.124,30,2.35,31,3.482,32,3.482,33,3.482,34,2.35,35,2.35,36,2.124,37,2.227,38,2.35,39,2.227,40,2.124,41,2.35,42,2.35,43,2.35,44,2.35]],["t/17",[0,0.424,1,3.202,2,2.243,3,2.243,4,2.243,5,1.843,6,4.118,7,2.243,8,2.367,9,2.243,10,3.34,11,2.367,12,2.243,13,2.367,14,2.71,15,1.421,16,2.974,17,4,18,3.081,19,2.974,20,3.202,21,3.34,22,4.134,23,2.501,24,2.243,25,2.243,26,2.367,27,2.243,28,2.367,29,2.139,30,2.367,31,3.501,32,3.501,33,3.501,34,2.367,35,2.367,36,2.139,37,2.243,38,2.367,39,2.243,40,2.139,41,2.367,42,2.367,43,2.367,44,2.367]],["t/19",[0,0.43,40,3.769,45,4.17,46,4.81,47,4.17,48,4.444,49,4.81,50,4.444,51,5.944,52,4.81,53,4.81,54,3.247,55,4.444,56,4.444,57,4.444,58,4.81,59,3.951,60,4.81,61,3.769,62,4.81,63,4.81,64,4.444]],["t/21",[0,0.44,14,1.118,15,1.275,78,1.597,79,1.355,80,1.499,81,1.597,82,2.712,83,4.376,84,1.597,85,3.528,86,1.597,87,2.825,88,1.597,89,1.597,90,1.597,91,1.597,92,1.597,93,3.259,94,1.355,95,1.597,96,2.712,97,1.597,98,1.597,99,1.597,100,3.651,101,1.597,102,1.597,103,1.597,104,1.597,105,1.355,106,1.597,107,1.597,108,3.142,109,1.597,110,2.712,111,1.597,112,1.597,113,1.597,114,1.597,115,1.597,116,1.597,117,1.597,118,2.545,119,1.597,120,2.712,121,1.597,122,2.712,123,1.597,124,1.597,125,1.597,126,1.597,127,2.205,128,1.597,129,1.597,130,3.534,131,3.065,132,1.42,133,2.712,134,1.42,135,1.597,136,3.794,137,2.712,138,2.712,139,1.597,140,1.597,141,1.597,142,1.597,143,1.597,144,1.597,145,1.597,146,1.597,147,1.597,148,1.597,149,1.597]],["t/23",[0,0.439,14,1.122,15,1.279,78,1.604,79,1.361,80,1.505,81,1.604,82,2.722,83,4.381,84,1.604,85,3.535,86,1.604,87,2.832,88,1.604,89,1.604,90,1.604,91,1.604,92,1.604,93,3.269,94,1.361,95,1.604,96,2.722,97,1.604,98,1.604,99,1.604,100,3.66,101,1.604,102,1.604,103,1.604,104,1.604,105,1.361,106,1.604,107,1.604,108,3.153,109,1.604,110,2.722,111,1.604,112,1.604,113,1.604,114,1.604,115,1.604,116,1.604,117,1.604,118,2.554,119,1.604,120,2.722,121,1.604,122,
2.722,123,1.604,124,1.604,125,1.604,126,1.604,127,2.214,128,1.604,129,1.604,130,3.546,131,3.071,132,1.427,133,2.722,134,1.427,135,1.604,136,3.804,137,2.722,138,2.722,139,1.604,140,1.604,141,1.604,142,1.604,143,1.604,144,1.604,145,1.604,146,1.604,147,1.604,148,1.604,149,1.604]],["t/25",[14,2.742,150,8.032]],["t/27",[0,0.45,87,2.393,151,1.747,152,1.473,153,1.473,154,2.441,155,2.325,156,2.594,157,2.594,158,2.594,159,2.325,160,2.594,161,6.04,162,2.594,163,2.594,164,2.594,165,2.594,166,2.594,167,1.168,168,4.186,169,2.594,170,2.594,171,2.594,172,2.594,173,2.594,174,2.594,175,2.149,176,2.594,177,2.594,178,0.839,179,1.622,180,2.594,181,5.263,182,2.594,183,2.594,184,2.594,185,2.594,186,3.475,187,1.473,188,1.473,189,0.839,190,1.473,191,1.473,192,1.473,193,1.473,194,1.035,195,1.473,196,1.473,197,1.473,198,1.473,199,1.473,200,1.473,201,1.473,202,1.473,203,1.321,204,1.473,205,1.473,206,0.722,207,1.473,208,1.473,209,1.473,210,1.145,211,1.22,212,4.157,213,1.473,214,1.473,215,1.473,216,1.473,217,1.473,218,1.473,219,1.473,220,1.473,221,1.473,222,1.473,223,1.473,224,1.473,225,2.878,226,1.321,227,1.473,228,1.473,229,1.473,230,1.473,231,1.321,232,1.321,233,1.473,234,1.473,235,1.473,236,1.473,237,1.473,238,1.473,239,1.473,240,1.473,241,1.473,242,1.473,243,1.473,244,1.473,245,1.473,246,2.594,247,1.473,248,1.473,249,1.473,250,1.473,251,1.473,252,1.321,253,1.473,254,1.321,255,1.473,256,1.22,257,1.321,258,1.473,259,1.473]],["t/29",[0,0.431,87,1.667,100,2.154,127,2.238,151,2.238,206,1.629,210,2.583,225,2.752,260,3.324,261,3.953,262,3.324,263,3.324,264,3.324,265,2.752,266,2.979,267,2.979,268,2.979,269,2.979,270,2.979,271,2.979,272,2.979,273,2.979,274,2.979,275,4.56,276,2.979,277,2.979,278,5.734,279,2.979,280,2.752,281,2.979,282,4.56,283,2.979,284,4.56,285,2.979,286,2.979,287,4.56,288,2.979,289,2.979,290,2.979,291,2.979,292,2.979,293,5.54,294,2.979,295,2.979,296,2.979,297,2.979,298,5.54,299,2.979,300,2.752,301,2.979,302,2.979,303,2.979]],["t/33",[0,0.461]],["t/35",[0,0.448]],["t/37",[0,0.372]],["t/40",[304,7.552,305,3.304,306,6.891,307,5.706,308,6.891,309,5.706,310,6.176,311,6.176,312,6.891,313,6.891]],["t/42",[305,3.411,307,5.892,309,5.892,314,7.115,315,6.378,316,7.701,317,7.115,318,7.115]],["t/44",[0,0.379,14,1.952,15,1.708,203,5.125,304,5.125,307,4.735,309,4.735,310,5.125,311,6.71,315,5.125,316,5.125,319,5.718,320,5.718,321,7.487,322,5.718,323,4.443,324,5.718,325,5.718,326,5.718,327,5.718]],["t/46",[0,0.372]],["t/50",[0,0.449,15,1.731,328,7.979,329,5.794,330,5.794,331,5.794,332,5.794,333,5.794,334,5.794,335,5.794,336,5.794,337,5.794]],["t/54",[0,0.415,338,7.887]],["t/57",[0,0.422,339,9.295,340,7.233,341,7.233]],["t/59",[0,0.365,342,8.032]],["t/62",[0,0.419]],["t/64",[343,9.227]],["t/66",[0,0.465]],["t/69",[0,0.448]],["t/71",[0,0.434,344,6.944]],["t/74",[0,0.448]],["t/76",[0,0.439,93,3.565,328,7.819,345,5.502,346,5.502,347,4.931,348,5.502,349,5.502,350,4.931,351,5.502,352,5.502,353,4.931,354,5.502,355,5.502,356,5.502,357,5.502,358,5.502]],["t/79",[0,0.458]],["t/81",[0,0.446,347,7.409,350,7.409,353,5.988,359,6.68,360,6.68]],["t/83",[0,0.45,305,3.526,361,5.416]],["t/85",[0,0.448,305,4.16,361,5.326]],["t/87",[0,0.448]],["t/91",[0,0.433,151,6.056,261,4.625,362,5.952,363,5.335,364,5.952,365,5.952,366,5.952,367,5.952,368,5.952,369,5.335,370,5.952,371,5.335]],["t/93",[151,5.216,372,7.747,373,7.747,374,7.747]],["t/95",[0,0.41,151,4.568,261,5.271,369,6.081,371,6.081,375,6.784,376,6.081,377,6.784,378,6.784,379,6.784]],["t/97",[0,0.419]],["t/99",[0,0.424,151,5.514,210,5.112,278,6.781,305,3.154,380,8.189,381,6.579,382,5.897]],[
"t/101",[0,0.442,14,0.588,22,1.078,47,1.339,79,1.21,87,2.34,127,1.16,151,3.879,155,2.666,206,1.923,210,2.311,225,1.427,261,1.339,265,1.427,266,1.545,267,1.545,268,1.545,269,1.545,270,1.545,271,2.666,272,1.545,273,1.545,274,1.545,275,2.666,276,1.545,277,1.545,278,3.865,279,1.545,280,1.427,281,1.545,282,2.666,283,1.545,284,2.666,285,1.545,286,1.545,287,2.666,288,1.545,289,1.545,290,1.545,291,1.545,292,1.545,293,3.516,294,1.545,295,1.545,296,1.545,297,1.545,298,3.516,299,1.545,300,1.427,301,1.545,302,1.545,303,1.545,363,1.545,376,1.545,383,1.723,384,1.723,385,1.723,386,2.974,387,1.723,388,1.545,389,1.723,390,1.723,391,1.723,392,2.974,393,4.667,394,1.723,395,1.723,396,2.974,397,4.667,398,4.667,399,2.974,400,1.723,401,1.723,402,1.723,403,1.723,404,1.723,405,2.974,406,2.974,407,1.723,408,1.723,409,2.974,410,1.723,411,1.723,412,1.545,413,1.723,414,1.545,415,1.723,416,1.723,417,1.723,418,1.723,419,1.723,420,1.723,421,1.723,422,1.545,423,1.723,424,1.723,425,1.723,426,1.339,427,1.723,428,1.723,429,1.723,430,1.723,431,1.545,432,1.545,433,1.723,434,2.974,435,2.974,436,2.974,437,1.723,438,1.723,439,1.723,440,1.723]],["t/107",[441,7.887,442,7.887,443,7.887]],["t/110",[444,9.821,445,6.68,446,6.68,447,6.68,448,6.68,449,6.68,450,6.68,451,6.68]],["t/112",[0,0.445,452,7.612]],["t/114",[0,0.419]],["t/116",[453,5.747]],["t/118",[0,0.294,454,8.855,455,6.482,456,6.482,457,6.482,458,6.482,459,6.482,460,6.482,461,5.81,462,6.482,463,6.482,464,6.482,465,6.482]],["t/120",[0,0.358,466,6.413]],["t/122",[0,0.372]],["t/124",[0,0.467,14,2.696,23,1.58,167,2.955,305,1.44,467,3.304,468,2.692,469,5.881,470,5.197,471,5.197,472,5.881,473,6.772,474,1.878,475,2.692,476,2.692,477,4.216,478,2.692,479,2.692,480,1.621]],["t/126",[0,0.466,14,2.657,15,0.609,23,1.071,131,1.609,167,3.019,175,1.046,178,1.16,206,0.998,212,1.232,305,2.788,467,3.12,468,1.826,469,4.67,470,3.981,471,3.981,472,5.212,473,5.648,474,2.778,475,1.826,476,1.826,477,3.074,478,1.826,479,1.826,480,1.85,481,1.583,482,1.16,483,1.16,484,1.583,485,1.274,486,2.037,487,3.429,488,2.037,489,2.037,490,2.037,491,2.037,492,2.991,493,2.037,494,2.037,495,2.037,496,1.431]],["t/130",[497,7.887,498,7.887,499,7.887]],["t/132",[0,0.425,77,5.408,131,2.832,500,6.024,501,7.752,502,6.034,503,6.034,504,6.034,505,6.034,506,6.034,507,6.034,508,6.034,509,6.034,510,6.034]],["t/134",[0,0.43,15,1.881,61,4.422,511,4.422,512,6.295,513,6.295,514,7.965,515,6.295,516,6.295,517,6.295,518,6.295,519,5.642]],["t/136",[0,0.394,520,7.233,521,7.233,522,7.233,523,6.483,524,4.008,525,7.233]],["t/139",[526,8.182]],["t/141",[0,0.394,15,1.3,54,2.632,131,2.042,189,2.478,511,4.361,524,2.41,527,4.351,528,4.351,529,7.24,530,6.209,531,4.351,532,4.351,533,4.351,534,4.351,535,4.351,536,7.24,537,4.351,538,4.351,539,4.351,540,4.351,541,2.632,542,4.351,543,4.351,544,4.351,545,4.351,546,4.351,547,4.351,548,4.351,549,6.209,550,4.351,551,4.351,552,4.351,553,4.351,554,4.351,555,4.351]],["t/143",[0,0.299,15,1.966,45,5.112,83,3.377,131,3.088,524,3.645,556,6.579,557,6.579,558,6.579,559,6.579,560,6.579,561,6.579,562,6.579,563,6.579]],["t/145",[0,0.446,15,1.996,80,5.191,83,3.429,131,3.135,519,5.988,564,6.68,565,6.68]],["t/147",[0,0.41,566,7.747,567,7.747]],["t/149",[15,2.446,87,4.47,189,5.079,568,6.781,569,6.579,570,6.579,571,6.579,572,6.579]],["t/151",[15,2.197,87,4.394,189,4.19,568,6.09,573,7.355,574,7.355]],["t/153",[15,2.126,87,3.568,189,4.894,524,3.942,568,5.892,575,7.115,576,7.115,577,7.115]],["t/155",[0,0.328,524,5.15,578,7.233,579,7.233,580,7.233,581,7.233]],["t/157",[61,6.347,582,7.747,583,7.747]],["t/159",[0,0.308,15,2.027,2
9,4.765,87,4.184,189,3.865,584,6.784,585,6.784,586,6.784,587,6.784,588,6.784,589,6.784]],["t/161",[0,0.434,590,7.747]],["t/164",[591,6.241,592,6.651]],["t/166",[0,0.394,15,2.161,131,3.395,591,6.742,592,7.185]],["t/168",[0,0.394,15,2.161,131,3.395,591,6.742,592,7.185]],["t/171",[0,0.465,15,2.632,131,2.649,175,3.811,593,5.644,594,5.644]],["t/173",[0,0.461,595,7.001,596,7.001]],["t/175",[0,0.458]],["t/177",[0,0.41,597,6.944,598,6.944]],["t/179",[0,0.472,175,4.127,599,4.752,600,4.752,601,4.752]],["t/182",[0,0.467,85,3.844,602,4.147,603,4.147,604,4.147,605,4.147,606,4.147,607,4.147,608,4.147,609,4.147,610,4.367,611,4.147,612,4.147,613,3.831,614,4.147]],["t/185",[0,0.471,175,4.028,597,4.532,598,4.532,599,4.532,600,4.532,601,4.532]],["t/187",[0,0.467,85,3.844,602,4.147,603,4.147,604,4.147,605,4.147,606,4.147,607,4.147,608,4.147,609,4.147,610,4.367,611,4.147,612,4.147,613,3.831,614,4.147]],["t/189",[0,0.419]],["t/191",[453,5.747]],["t/193",[0,0.365,615,8.032]],["t/195",[0,0.46,6,3.906,14,2.431,23,3.746,54,4.713,108,3.11,167,3.208,178,2.407,206,2.07,212,2.556,480,2.279,481,3.282,482,2.407,483,2.407,492,2.844,496,2.967,616,4.224,617,4.224]],["t/197",[0,0.46,6,3.776,14,2.571,15,1.397,23,4.289,54,3.954,167,2.943,305,2.242,474,2.925,618,4.677,619,7.531,620,3.872]],["t/199",[0,0.428,14,3.219,621,6.784,622,6.784,623,6.784,624,6.784]],["t/201",[0,0.459,5,3.671,14,2.656,15,1.812,85,3.114,94,3.492,167,3.316,178,1.838,206,1.581,212,1.952,480,1.74,481,2.506,482,1.838,483,1.838,485,2.017,625,3.226,626,4.456,627,4.456,628,3.226,629,3.226,630,3.226,631,6.067,632,6.817,633,5.297,634,3.226]],["t/203",[0,0.323,14,2.429,635,7.115,636,7.115,637,7.115,638,7.115,639,7.115,640,7.115,641,7.115]],["t/205",[0,0.328,15,2.161,642,7.233,643,8.677,644,7.233,645,7.233,646,7.233]],["t/207",[0,0.458,14,2.797,15,1.847,23,1.748,167,3.119,178,1.894,206,1.629,212,2.011,257,6.69,305,2.439,431,5.54,467,2.335,474,2.079,480,1.793,481,2.583,482,1.894,483,1.894,484,2.583,492,3.426,496,3.574,620,2.752,647,6.181,648,3.324,649,4.213,650,3.324,651,3.324,652,3.324]],["t/209",[0,0.428,87,3.401,653,6.081,654,6.784,655,8.345,656,6.784,657,6.784,658,6.784]],["t/211",[659,4.069,660,5.794,661,5.794,662,5.193,663,5.794,664,5.794,665,7.53,666,5.794,667,7.551,668,5.794,669,5.193,670,3.754,671,4.069,672,5.193,673,5.794,674,5.193,675,5.193,676,5.193,677,5.794,678,4.797]],["t/213",[0,0.459,85,2.508,485,4.213,492,4.536,659,4.731,679,4.886,680,4.886,681,3.598,682,4.886,683,3.598,684,3.797,685,3.797,686,3.797,687,4.379,688,4.046,689,4.886,690,4.886,691,4.886]],["t/215",[0,0.449,692,6.176,693,6.891,694,6.891,695,6.891,696,6.891]],["t/217",[0,0.458,467,5.397,492,4.008,496,4.18,681,4.382,683,4.382,684,4.625,697,5.952,698,5.335,699,5.952]],["t/219",[0,0.454,681,4.773,683,4.773,685,5.037,686,5.037,700,6.482,701,6.482,702,6.482]],["t/221",[0,0.449,6,3.068,15,1.828,131,2.871,703,6.118,704,6.118,705,6.118,706,6.118,707,6.118,708,6.118,709,6.118,710,4.12]],["t/223",[305,3.526,474,4.6,711,6.593,712,5.416,713,7.355,714,6.593,715,6.593]],["t/225",[681,5.326,683,5.326,684,5.621,685,5.621,686,5.621,687,6.483,688,5.989,716,7.233]],["t/227",[0,0.452,6,3.541,131,2.458,305,2.511,474,3.276,633,4.07,670,3.394,712,3.857,717,4.337,718,5.238,719,4.695,720,5.238,721,5.238,722,4.337,723,5.238,724,5.238,725,5.238,726,4.695,727,4.695]],["t/229",[0,0.399,83,3.279,132,4.703,212,3.865,671,4.486,674,5.725,675,5.725,676,5.725,728,6.387,729,6.387,730,6.387,731,6.387,732,6.387,733,6.387]],["t/231",[0,0.462,14,2.431,15,2.127,256,4.39,280,4.39,305,2.542,734,5.301,735,4.752,736,5.301,737,5.301,73
8,5.301,739,5.301,740,5.301]],["t/233",[0,0.419]],["t/235",[453,5.747]],["t/238",[0,0.419]],["t/240",[0,0.454]],["t/242",[0,0.419]],["t/244",[741,8.182]],["t/246",[0,0.464,5,2.159,87,1.789,159,8.399,194,3.771,500,5.016,742,4.446,743,3.568,744,3.568,745,5.369,746,5.369,747,5.369,748,5.369,749,5.369,750,3.568,751,3.568]],["t/249",[0,0.465,752,4.941,753,4.941,754,4.941,755,4.941,756,4.941,757,4.941,758,4.941,759,4.941,760,4.941,761,4.941,762,4.941,763,4.941,764,4.941,765,4.941,766,4.941,767,4.941]],["t/251",[0,0.462,79,3.593,105,3.593,206,2.507,467,3.593,482,2.914,483,2.914,659,3.593,669,4.585,670,3.314,671,3.593,678,4.235,768,4.585,769,4.235,770,3.975,771,5.115,772,5.115]],["t/253",[206,3.041,482,3.535,483,3.535,773,6.205,774,6.205,775,6.205,776,6.205,777,6.205,778,6.205,779,6.205,780,6.205,781,6.205,782,4.822,783,6.205,784,6.205,785,6.205,786,6.205,787,6.205]],["t/255",[0,0.434,6,2.682,15,1.598,23,1.397,54,3.236,167,2.758,466,4.301,511,1.866,610,1.789,692,2.381,788,5.901,789,2.657,790,2.657,791,2.657,792,2.657,793,2.657,794,2.657,795,1.866,796,2.657,797,2.657,798,2.657,799,2.657,800,2.657,801,2.064,802,2.657,803,4.758,804,4.267,805,2.381,806,2.657,807,2.657,808,2.657,809,2.657,810,2.657,811,2.657,812,2.2,813,2.381,814,2.381,815,5.069,816,2.657,817,2.657,818,2.657,819,2.657,820,2.657,821,2.657,822,2.657,823,2.381,824,2.657,825,2.381,826,2.657,827,2.657,828,2.657,829,2.657,830,2.657,831,2.657,832,2.657,833,2.657]],["t/258",[0,0.379,167,3.055,179,4.243,834,6.081,835,4.765,836,5.617,837,6.784,838,4.995,839,6.784,840,6.081,841,6.784]],["t/260",[0,0.455,23,2.267,87,1.348,167,2.429,175,1.38,179,4.221,480,1.451,485,1.682,834,4.833,835,1.889,842,3.97,843,2.689,844,2.689,845,2.689,846,2.689,847,2.689,848,2.689,849,2.689,850,2.689,851,2.689,852,2.689,853,2.689,854,2.689,855,2.689,856,2.689,857,2.792,858,5.107,859,2.41,860,2.227,861,2.09,862,2.41,863,2.227,864,2.227,865,2.41,866,5.588,867,2.227,868,2.227,869,3.97,870,3.568,871,2.227,872,2.227,873,2.227,874,2.227,875,2.41,876,2.41,877,2.41,878,2.227,879,2.227,880,2.227,881,1.889,882,2.41,883,1.98]],["t/262",[0,0.299,189,3.748,815,4.43,857,4.263,869,4.844,884,6.579,885,6.579,886,6.928,887,6.579,888,6.579,889,6.579,890,6.579]],["t/265",[0,0.408,6,2.984,22,3.722,23,3.131,85,3.055,167,3.833,838,4.382,840,6.888,891,6.266,892,5.952,893,5.952,894,5.952,895,5.952]],["t/267",[0,0.435,83,3.097,167,3.858,815,4.063,891,4.443,896,7.752,897,6.034,898,6.034,899,6.034,900,6.034,901,6.034,902,6.034]],["t/269",[0,0.445,14,2.118,167,3.555,610,4.178,891,5.811,903,6.205,904,6.205,905,6.205,906,6.205,907,6.205]],["t/271",[0,0.358,908,7.887,909,7.887]],["t/273",[0,0.443,15,1.262,83,2.168,87,2.118,131,1.982,633,3.282,653,5.449,770,3.282,910,4.224,911,4.224,912,4.224,913,4.224,914,4.224,915,4.224,916,4.224,917,6.079,918,4.224,919,4.224,920,4.224,921,4.224,922,4.224,923,4.224,924,4.224,925,4.224,926,4.224,927,7.79,928,4.224,929,4.224,930,4.224,931,4.224,932,4.224,933,4.224,934,4.224]],["t/275",[0,0.469,175,2.383,211,1.416,212,1.035,256,1.416,414,1.533,769,1.416,935,1.71,936,1.71,937,1.71,938,1.71,939,1.71,940,1.71,941,1.71,942,1.71,943,1.71,944,1.71,945,1.71,946,1.71,947,1.71,948,1.71,949,1.71,950,1.71,951,1.71,952,1.533,953,1.71,954,1.71,955,1.71,956,1.71,957,1.71,958,1.71,959,1.71,960,1.71,961,1.71,962,1.71,963,1.71,964,1.71,965,1.71,966,1.71,967,1.71,968,1.71,969,1.533,970,1.71,971,1.71,972,1.71,973,1.71,974,1.71,975,1.71,976,1.71,977,1.71,978,1.533,979,1.71,980,1.71,981,2.954,982,1.71,983,1.71,984,1.71,985,1.533,986,1.71,987,1.71,988,1.71,989,1.71,990,1.71,991,1.71,992,1.71,9
93,1.71,994,1.71,995,1.71,996,1.71,997,1.71,998,1.71,999,1.71,1000,1.71,1001,1.71,1002,1.71,1003,1.71,1004,1.71,1005,1.71,1006,1.71,1007,1.71,1008,1.71,1009,1.71,1010,1.71,1011,1.71,1012,1.71,1013,1.71,1014,1.71,1015,1.71,1016,1.533,1017,1.71,1018,1.71,1019,1.71,1020,1.71,1021,1.71]],["t/277",[0,0.46,6,2.398,14,1.276,15,0.675,167,2.154,175,2.455,178,1.286,206,1.107,212,2.262,305,1.083,361,1.663,474,1.412,480,1.218,482,1.286,483,1.286,485,3.476,492,2.517,496,1.586,620,1.87,649,1.87,659,4.935,712,1.663,722,1.87,835,1.586,1022,2.258,1023,2.258,1024,2.258,1025,2.258,1026,2.258,1027,6.297,1028,2.258,1029,2.258,1030,2.258,1031,2.258,1032,2.258,1033,2.024,1034,4.782,1035,4.286,1036,2.024,1037,2.024,1038,4.286,1039,2.024,1040,2.258,1041,2.258,1042,2.258,1043,3.738,1044,2.024,1045,2.258,1046,2.258,1047,2.258,1048,2.258,1049,2.258,1050,1.87,1051,2.258,1052,2.258,1053,2.258,1054,2.258]],["t/279",[0,0.453,6,2.424,105,4.165,175,2.482,426,2.417,681,2.29,683,2.29,684,2.417,685,2.417,686,2.417,688,2.576,835,2.185,1027,5.993,1035,5.993,1036,2.788,1037,2.788,1038,4.333,1039,2.788,1044,2.788,1055,3.111,1056,4.834,1057,3.111,1058,3.111,1059,3.111,1060,3.111,1061,3.111,1062,3.111,1063,3.111,1064,3.111,1065,3.111,1066,4.834,1067,3.111,1068,3.111,1069,3.111,1070,3.111,1071,5.929,1072,3.111,1073,3.111,1074,3.111,1075,3.111,1076,3.111,1077,3.111,1078,3.111,1079,3.111,1080,3.111]],["t/281",[0,0.454,87,3.29,211,2.487,305,1.44,467,6.418,662,2.692,838,2.212,969,2.692,978,2.692,1081,3.004,1082,3.004,1083,3.004,1084,3.004,1085,3.004,1086,3.004,1087,3.004,1088,3.004,1089,3.004,1090,3.004,1091,3.004,1092,3.004,1093,3.004,1094,3.004,1095,3.004,1096,3.004,1097,3.004,1098,3.004,1099,3.004,1100,3.004,1101,3.004,1102,3.004,1103,3.004,1104,3.004,1105,3.004,1106,3.004,1107,3.004,1108,3.004,1109,3.004,1110,3.004,1111,3.004,1112,3.004,1113,4.704,1114,2.487,1115,3.004,1116,3.004,1117,3.004]],["t/283",[0,0.372,265,5.448,484,5.112,610,4.43,836,5.448,1118,6.579,1119,6.579,1120,6.579,1121,6.579,1122,6.579,1123,6.579,1124,6.579,1125,6.579]],["t/285",[0,0.461,167,2.594,254,5.162,426,3.047,671,5.628,1126,3.921,1127,3.921,1128,3.921,1129,3.921,1130,5.759,1131,5.759,1132,5.759,1133,3.921,1134,3.921,1135,3.921,1136,3.921,1137,3.921,1138,3.921,1139,3.921,1140,3.921,1141,3.921,1142,3.921,1143,3.921,1144,3.921,1145,3.921]],["t/287",[0,0.448,79,5.863,305,2.741,474,3.576,711,5.125,712,4.21,714,5.125,715,5.125,881,4.016,1146,5.718,1147,5.718,1148,5.718,1149,4.735,1150,5.718]],["t/289",[0,0.455,710,3.705,770,6.365,1151,5.502,1152,5.502,1153,4.275,1154,5.502,1155,5.502,1156,5.502,1157,4.275,1158,5.502,1159,5.502,1160,5.502,1161,5.502]],["t/291",[0,0.468,305,1.784,474,2.327,649,3.081,670,4.288,712,2.739,717,3.081,722,3.081,726,3.335,727,3.335,881,2.613,1033,3.335,1149,3.081,1162,3.72,1163,3.72,1164,3.72,1165,3.72,1166,3.72,1167,3.72,1168,3.72,1169,3.72,1170,3.72,1171,3.72,1172,3.72,1173,3.72,1174,3.72,1175,3.72]],["t/293",[305,3.468,670,4.687,672,6.483,719,6.483,1149,5.989,1176,7.233,1177,7.233,1178,7.233]],["t/295",[0,0.465,14,1.452,15,2.049,19,1.201,93,2.756,167,0.865,175,1.675,305,0.921,466,1.349,678,2.702,805,1.721,812,1.59,815,1.293,823,3.813,1179,1.92,1180,5.62,1181,3.263,1182,1.92,1183,5.62,1184,8.514,1185,1.92,1186,1.92,1187,1.92,1188,1.92,1189,1.92,1190,1.92,1191,4.254,1192,1.92,1193,1.92,1194,1.92,1195,1.92,1196,6.11,1197,1.92,1198,1.92,1199,1.92,1200,1.92,1201,1.92,1202,1.92,1203,1.92,1204,1.92,1205,1.92,1206,1.92,1207,1.92,1208,1.92,1209,1.92,1210,1.92,1211,1.92,1212,1.92,1213,1.92,1214,2.925]],["t/298",[0,0.456,14,1.907,15,1.343,83,1.456,
85,1.456,93,1.342,100,1.342,131,0.972,136,2.71,167,2.516,175,0.588,178,0.652,179,1.295,206,0.561,212,1.716,480,2.171,482,0.652,483,0.652,484,0.89,485,3.068,492,3.952,496,1.992,511,1.455,627,1.026,659,0.804,665,1.026,670,0.742,717,1.715,742,2.348,788,0.771,795,2.443,838,0.843,857,1.342,858,2.88,860,0.948,861,2.702,863,0.948,864,0.948,866,3.332,867,0.948,868,0.948,869,2.561,870,2.348,871,0.948,872,0.948,873,0.948,874,0.948,878,0.948,879,2.88,880,3.332,881,1.455,882,1.026,883,2.561,1050,0.948,1215,1.026,1216,1.145,1217,1.026,1218,1.026,1219,1.856,1220,1.026,1221,0.948,1222,2.836,1223,2.071,1224,1.715,1225,3.478,1226,2.071,1227,2.071,1228,2.071,1229,1.145,1230,1.145,1231,1.145,1232,2.071,1233,2.071,1234,2.071,1235,2.071,1236,2.071,1237,1.145,1238,2.071,1239,1.145,1240,3.478,1241,2.071,1242,1.145,1243,1.145,1244,1.145,1245,2.071,1246,1.145,1247,2.071,1248,2.071,1249,2.071,1250,1.145,1251,1.145,1252,1.145,1253,1.145,1254,1.145,1255,1.145,1256,2.836,1257,1.145,1258,1.145,1259,2.071,1260,1.145,1261,1.145,1262,1.145,1263,1.145,1264,1.145,1265,2.071]],["t/300",[0,0.444,14,0.553,15,0.843,85,1.922,154,1.139,167,3.218,175,1.922,178,0.923,179,1.764,206,0.794,212,2.708,231,2.528,480,3.584,482,0.923,483,0.923,485,1.764,659,1.139,710,1.092,768,1.453,770,1.26,788,1.092,795,1.981,857,1.827,858,3.706,859,1.453,860,1.342,861,1.26,862,1.453,863,1.342,864,1.342,865,1.453,866,4.199,867,1.342,868,1.342,869,4.098,870,3.1,871,1.342,872,1.342,873,1.342,874,1.342,875,1.453,876,1.453,877,1.453,878,1.342,879,1.342,880,2.335,881,1.981,883,2.077,1050,1.342,1153,1.26,1157,1.26,1215,1.453,1217,1.453,1218,1.453,1219,2.528,1220,1.453,1224,1.342,1266,1.621,1267,1.621,1268,2.82,1269,2.82,1270,1.621,1271,1.621,1272,1.621,1273,1.621,1274,1.621,1275,1.621,1276,1.621,1277,1.621,1278,1.621,1279,1.621,1280,1.621,1281,1.621,1282,1.621,1283,1.621,1284,1.621,1285,1.621,1286,1.621,1287,1.621,1288,1.621,1289,1.621,1290,1.621,1291,1.621,1292,1.621,1293,1.621,1294,1.621,1295,1.621,1296,1.621,1297,1.621,1298,1.26,1299,1.621,1300,1.621,1301,1.342,1302,1.621,1303,1.621,1304,1.621,1305,1.621,1306,1.621,1307,1.621,1308,1.621,1309,1.621,1310,1.621,1311,1.621,1312,1.621,1313,1.621,1314,1.621,1315,1.621,1316,1.621,1317,1.621,1318,1.621,1319,1.621,1320,1.621,1321,2.82,1322,1.621]],["t/302",[0,0.446,5,2.139,14,1.36,15,1.056,22,0.598,47,2.351,85,1.553,87,0.882,94,0.672,100,0.62,131,0.449,154,1.235,167,3.037,175,0.903,178,0.545,179,1.892,189,1.002,194,0.672,480,2.553,500,2.351,610,3.399,626,1.576,710,2.038,742,1.456,788,3.764,838,2.228,857,2.581,869,0.704,881,0.672,883,0.704,1114,0.792,1224,2.02,1323,0.957,1324,0.957,1325,0.957,1326,1.758,1327,2.44,1328,0.957,1329,0.957,1330,2.44,1331,0.957,1332,3.026,1333,1.758,1334,0.957,1335,0.957,1336,0.957,1337,0.957,1338,0.957,1339,0.957,1340,0.957,1341,1.758,1342,4.379,1343,1.758,1344,0.743,1345,3.536,1346,1.758,1347,0.957,1348,0.957,1349,1.758,1350,0.957,1351,0.957,1352,0.957,1353,1.758,1354,0.957,1355,0.957,1356,5.825,1357,2.44,1358,2.44,1359,3.983,1360,0.957,1361,0.957,1362,0.957,1363,0.957,1364,0.957,1365,0.957,1366,0.957,1367,0.957,1368,0.957,1369,0.957,1370,0.957,1371,0.957,1372,0.957,1373,0.957,1374,1.758,1375,2.44,1376,2.44,1377,1.758,1378,0.957,1379,2.44,1380,1.758,1381,0.957,1382,1.758,1383,0.957,1384,0.957,1385,0.957,1386,0.957,1387,0.957,1388,0.957,1389,2.44,1390,0.957,1391,1.758,1392,1.456,1393,0.957,1394,0.957,1395,0.957,1396,1.758,1397,0.957,1398,0.957,1399,0.957,1400,0.957,1401,0.957,1402,0.957,1403,0.957,1404,0.957,1405,0.957,1406,0.957,1407,0.957,1408,0.957,1409,0.957,1410,0.957,1411,0.957,1412,
0.957,1413,0.957,1414,0.957,1415,1.758,1416,0.957,1417,0.957,1418,0.957,1419,0.957,1420,0.957,1421,0.957,1422,0.957,1423,0.957,1424,0.957,1425,0.957,1426,0.957,1427,0.957,1428,0.957,1429,0.957,1430,0.957,1431,0.957,1432,0.957,1433,1.758,1434,0.957,1435,0.957,1436,1.758,1437,1.758,1438,0.957,1439,0.957,1440,0.957,1441,0.957,1442,0.957,1443,0.957,1444,0.957,1445,0.957,1446,0.957,1447,0.957,1448,0.957]],["t/305",[0,0.457,14,1.409,15,1.233,154,2.899,167,3.531,175,0.741,178,0.823,179,0.903,206,0.708,466,1.79,480,3.543,482,0.823,483,0.823,485,0.903,710,0.972,782,1.122,788,0.972,795,2.899,801,1.122,803,1.122,815,0.972,835,1.014,857,0.936,886,1.122,1153,1.122,1157,1.122,1221,1.196,1298,2.658,1301,1.196,1392,1.196,1449,1.196,1450,1.196,1451,1.294,1452,1.294,1453,1.294,1454,1.196,1455,1.294,1456,2.284,1457,2.284,1458,2.284,1459,2.284,1460,2.549,1461,2.549,1462,1.444,1463,4.95,1464,2.284,1465,1.294,1466,2.284,1467,2.284,1468,2.11,1469,2.11,1470,2.284,1471,1.294,1472,1.294,1473,1.294,1474,1.294,1475,1.294,1476,1.294,1477,1.294,1478,1.294,1479,1.294,1480,3.699,1481,1.294,1482,1.294,1483,1.294,1484,1.294,1485,1.294,1486,1.294,1487,1.294,1488,1.294,1489,1.294,1490,1.294,1491,1.294,1492,1.294,1493,1.294,1494,3.066,1495,3.066,1496,1.294,1497,1.294,1498,1.294,1499,1.294,1500,1.294,1501,1.196,1502,1.196,1503,1.196,1504,1.294,1505,1.294,1506,1.294,1507,1.294,1508,1.294,1509,1.294,1510,2.11,1511,1.294,1512,1.294,1513,1.294,1514,3.421,1515,1.196,1516,1.294]],["t/307",[0,0.417,14,1.669,15,1.461,154,3.434,167,3.795,175,0.948,178,1.052,179,1.155,206,0.905,466,2.217,480,3.934,482,1.052,483,1.052,485,1.155,710,1.244,782,1.435,788,1.244,795,3.434,801,1.435,803,1.435,815,1.244,835,1.298,857,1.197,886,1.435,1153,1.435,1157,1.435,1221,1.53,1298,3.212,1301,1.53,1392,1.53,1449,1.53,1450,1.53,1451,1.656,1452,1.656,1453,1.656,1454,1.53,1455,1.656,1456,2.829,1457,2.829,1458,2.829,1459,2.829,1463,5.294,1464,2.829,1465,1.656,1466,2.829,1467,2.829,1468,2.614,1469,2.614,1470,2.829,1471,1.656,1472,1.656,1473,1.656,1474,1.656,1475,1.656,1476,1.656,1477,1.656,1478,1.656,1479,1.656,1480,4.383,1481,1.656,1482,1.656,1483,1.656,1484,1.656,1485,1.656,1486,1.656,1487,1.656,1488,1.656,1489,1.656,1490,1.656,1491,1.656,1492,1.656,1493,1.656,1494,3.705,1495,3.705,1496,1.656,1497,1.656,1498,1.656,1499,1.656,1500,1.656,1501,1.53,1502,1.53,1503,1.53,1504,1.656,1505,1.656,1506,1.656,1507,1.656,1508,1.656,1509,1.656,1510,2.614,1511,1.656,1512,1.656,1513,1.656,1515,1.53,1516,1.656]],["t/309",[0,0.377,14,0.985,15,1.687,19,2.85,154,2.026,167,3.827,175,1.481,178,1.643,179,1.804,206,1.414,466,3.201,480,3.462,482,1.643,483,1.643,782,2.241,788,4.32,795,3.201,801,2.241,803,2.241,813,2.586,814,2.586,815,1.942,825,2.586,857,1.869,886,2.241,1298,3.541,1449,2.389,1450,2.389,1454,2.389,1463,3.773,1468,2.389,1469,2.389,1501,2.389,1502,2.389,1503,2.389,1510,2.389,1515,2.389,1517,2.885,1518,2.885,1519,2.885,1520,2.885,1521,2.885,1522,2.885,1523,2.885,1524,2.885,1525,2.885,1526,2.885,1527,2.885,1528,2.885,1529,2.885,1530,2.885,1531,2.885,1532,2.885,1533,2.885,1534,2.885,1535,2.026,1536,2.885,1537,2.885,1538,2.885,1539,2.885,1540,2.885,1541,2.885,1542,2.885,1543,2.885,1544,2.885,1545,2.885,1546,2.885,1547,2.885,1548,2.885]],["t/312",[0,0.448]],["t/314",[0,0.419,131,2.997,1549,6.387,1550,6.387,1551,6.387,1552,6.387,1553,6.387,1554,6.387,1555,6.387,1556,6.387,1557,6.387,1558,6.387,1559,6.387]],["t/316",[0,0.413,1560,6.891,1561,6.891,1562,6.891,1563,6.891,1564,6.891,1565,6.176,1566,6.176,1567,6.891]],["t/320",[0,0.397,1,3.327,3,3.638,4,3.638,5,2.99,6,3.404,7,3.638,9,3
.638,10,3.471,12,4.998,14,1.687,15,2.028,16,3.09,17,4.107,18,3.202,19,3.09,20,3.327,21,3.471,22,4.245,23,2.599,24,3.638,25,3.638,323,3.84,1568,4.941,1569,4.941,1570,4.941,1571,4.941]],["t/322",[0,0.424,6,4.382,10,3.118,14,2.984,15,1.326,16,2.776,17,3.811,18,2.876,19,2.776,20,2.989,21,3.118,22,3.939,23,2.335,31,4.638,32,4.638,33,4.638,36,3.118,37,3.268,39,3.268,1572,4.439,1573,4.439,1574,4.439,1575,4.439]],["t/325",[0,0.402,48,5.367,50,5.367,51,6.718,54,3.922,55,5.367,56,5.367,57,5.367,1576,6.482,1577,6.482,1578,6.482,1579,6.482]],["t/327",[0,0.442,15,1.331,16,1.28,17,1.238,45,1.59,59,1.507,83,2.683,108,2.535,127,2.318,131,1.616,134,2.535,323,5.891,511,2.418,710,3.519,1580,2.046,1581,2.046,1582,2.046,1583,2.046,1584,3.443,1585,2.046,1586,2.046,1587,2.046,1588,2.046,1589,2.046,1590,2.046,1591,2.046,1592,2.046,1593,5.226,1594,2.046,1595,2.046,1596,2.046,1597,2.046,1598,2.046,1599,2.046,1600,4.457,1601,2.046,1602,2.046,1603,2.046,1604,2.046,1605,3.443,1606,2.046,1607,2.046,1608,2.046,1609,2.046,1610,2.046,1611,2.046,1612,2.046,1613,3.443,1614,2.046,1615,2.046,1616,2.046,1617,2.046,1618,2.046,1619,2.046,1620,2.046,1621,2.046,1622,2.046,1623,2.046,1624,2.046,1625,3.443,1626,2.046,1627,2.046,1628,2.046,1629,2.046,1630,2.046,1631,2.046,1632,2.046,1633,2.046,1634,2.046,1635,2.046,1636,2.046,1637,1.834,1638,3.443,1639,2.046,1640,4.457,1641,3.086,1642,1.834,1643,1.834,1644,5.226,1645,2.046,1646,3.443,1647,2.046,1648,2.046,1649,2.046,1650,2.046]],["t/330",[524,4.905,1651,7.481,1652,7.481,1653,7.481,1654,6.706]],["t/332",[0,0.39,189,4.645,524,5.453,541,2.83,1565,4.192,1566,4.192,1654,4.192,1655,4.677,1656,4.677,1657,4.677,1658,6.534,1659,4.677,1660,4.677,1661,4.677,1662,4.677,1663,4.677,1664,4.677,1665,4.677,1666,4.677,1667,4.677,1668,4.677]],["t/334",[0,0.308,136,5.619,524,3.759,671,4.765,1669,6.784,1670,6.784,1671,6.784,1672,6.784,1673,6.784,1674,6.784,1675,6.784]],["t/336",[0,0.375,83,4.243,93,4.328,524,4.974,1676,6.68,1677,6.68,1678,6.68,1679,6.68,1680,6.68]],["t/338",[0,0.313,83,4.325,524,5.043,1681,6.891,1682,6.891,1683,6.891,1684,6.891,1685,6.891]],["t/340",[0,0.399,15,1.908,83,3.279,93,4.138,523,5.725,524,5.115,671,4.486,812,5.289,1686,6.387,1687,6.387,1688,6.387]],["t/343",[59,5.077,194,4.843,206,3.38,524,3.821,842,5.077,985,4.532,1689,7.555,1690,5.71,1691,5.056,1692,5.056,1693,5.056,1694,5.056,1695,4.532,1696,6.181,1697,5.056,1698,5.056,1699,5.056,1700,5.056,1701,5.056,1702,6.181,1703,3.928,1704,5.056,1705,4.532]],["t/345",[59,3.443,194,3.285,206,3.691,232,4.192,300,3.872,388,4.192,422,4.192,524,2.591,670,4.88,769,5.411,842,4.811,1689,7.308,1690,3.872,1695,4.192,1702,5.857,1703,3.634,1705,6.751,1706,4.677,1707,4.677,1708,4.677,1709,3.872,1710,4.677,1711,4.192,1712,4.677,1713,4.677,1714,4.677,1715,4.677,1716,4.677,1717,4.677]],["t/347",[206,3.13,670,5.699,842,5.918,1690,5.289,1696,5.725,1703,6.246,1711,5.725,1718,5.725,1719,6.387,1720,6.387,1721,6.387,1722,6.387]],["t/349",[0,0.438,1535,4.692,1723,8.046,1724,6.68,1725,5.191,1726,5.988,1727,6.68]],["t/351",[0,0.436,15,1.493,131,2.345,610,3.365,1535,3.511,1725,6.061,1728,4.998,1729,9.072,1730,6.841,1731,4.998,1732,4.998,1733,4.998,1734,4.998,1735,4.998,1736,4.48,1737,4.998,1738,4.998,1739,4.998,1740,4.998]],["t/353",[0,0.416,85,3.594,541,4.236,1344,5.44,1723,6.276,1741,7.001,1742,7.001,1743,7.001]],["t/355",[0,0.432,6,0.922,14,1.074,15,1.457,16,1.15,54,1.903,83,2.503,127,2.118,134,2.316,361,1.354,511,2.209,541,4.946,891,3.59,1535,2.209,1641,2.819,1642,1.649,1643,1.649,1725,1.429,1744,1.84,1745,1.84,1746,1.84,1747,1.84,1748,3.145,1749,6.029,1750,1
.84,1751,1.84,1752,1.84,1753,1.84,1754,1.84,1755,1.84,1756,2.819,1757,2.819,1758,1.84,1759,1.84,1760,1.84,1761,1.84,1762,1.84,1763,1.84,1764,4.37,1765,1.84,1766,6.294,1767,4.37,1768,3.145,1769,1.84,1770,1.84,1771,1.84,1772,1.84,1773,1.84,1774,1.84,1775,1.84,1776,1.84,1777,1.84,1778,1.84,1779,1.84,1780,1.84,1781,1.84,1782,1.84,1783,1.84,1784,1.84,1785,1.84,1786,1.649,1787,1.84,1788,4.12,1789,4.876,1790,4.911,1791,4.12,1792,1.84,1793,1.84,1794,1.84,1795,1.84,1796,1.84,1797,3.145,1798,1.84,1799,1.84,1800,1.84,1801,2.604,1802,1.84,1803,1.84,1804,1.649,1805,1.84,1806,1.84,1807,1.84,1808,1.84]],["t/357",[0,0.402,131,2.171,189,3.695,541,4.911,891,5.514,1535,3.25,1749,4.147,1756,4.147,1757,4.147,1764,6.712,1786,4.147,1790,4.147,1801,5.37,1804,4.147,1809,4.627,1810,4.627,1811,4.627,1812,4.627,1813,4.627,1814,4.627,1815,4.627,1816,4.627,1817,4.627,1818,4.627,1819,4.627,1820,4.627,1821,4.627,1822,4.627]],["t/359",[0,0.464,5,2.556,6,2.118,14,1.442,54,2.556,85,2.168,87,2.118,94,2.967,189,2.407,541,3.678,1823,6.079,1824,4.224,1825,4.224,1826,4.224,1827,4.224,1828,6.079,1829,4.224,1830,4.224,1831,4.224,1832,4.224,1833,4.224,1834,4.224,1835,4.224]],["t/361",[0,0.408,541,4.65,836,4.928,1535,4.18,1718,5.335,1725,6.613,1726,5.335,1836,5.952,1837,5.952,1838,5.952,1839,7.684,1840,5.952,1841,5.952,1842,5.952]],["t/363",[0,0.427,15,1.686,1843,5.644,1844,5.644,1845,5.644,1846,5.644,1847,5.644,1848,5.644,1849,5.644,1850,5.644,1851,5.644,1852,5.644,1853,5.644,1854,5.644,1855,5.644,1856,5.644,1857,5.644,1858,5.644,1859,5.644,1860,5.644]],["t/366",[2,5.807,1736,7.069,1861,7.887]],["t/368",[0,0.358,1,5.311,1862,7.887]],["t/370",[1863,8.182]],["t/372",[0,0.384,14,2.004,69,6.307,1344,4.562,1864,5.872,1865,5.872,1866,5.872,1867,5.872,1868,5.872,1869,5.263,1870,5.263,1871,5.872,1872,5.872,1873,5.872,1874,5.872,1875,5.872,1876,5.872,1877,5.872,1878,5.872]],["t/374",[0,0.419]],["t/376",[1879,7.747,1880,7.747,1881,7.747,1882,7.747]],["t/378",[85,3.014,323,6.57,541,3.553,1016,5.263,1883,7.617,1884,7.617,1885,5.872,1886,5.263,1887,5.872,1888,5.263,1889,5.263,1890,5.872,1891,7.617,1892,5.872,1893,5.872,1894,5.872,1895,5.263]],["t/380",[0,0.411,85,3.281,541,5.13,1766,5.728,1767,5.728,1801,6.131,1886,4.061,1888,5.728,1889,5.728,1896,4.531,1897,4.531,1898,6.391,1899,4.531,1900,4.531,1901,4.531,1902,4.061,1903,4.531,1904,4.531,1905,4.531,1906,4.531,1907,4.531,1908,4.531,1909,4.531,1910,4.531,1911,4.531,1912,4.531]],["t/383",[0,0.346,1913,7.612,1914,7.612,1915,7.612,1916,7.612]],["t/385",[29,5.54,1917,7.069,1918,7.887]],["t/387",[0,0.425,1114,6.09,1917,6.593,1919,7.355,1920,7.355]],["t/390",[136,5.216,1921,7.747,1922,7.747,1923,6.944]],["t/392",[0,0.422,1,0.864,6,2.974,14,0.438,15,1.445,16,0.802,17,3.177,18,0.831,19,0.802,20,2.095,23,1.207,54,3.589,61,1.611,64,1.9,68,3.394,100,0.831,105,0.901,118,0.997,136,2.095,344,2.789,361,0.945,480,2.043,613,1.062,698,5.031,735,2.789,842,1.689,1214,1.15,1637,2.056,1703,1.783,1869,3.394,1870,1.15,1923,1.15,1924,1.283,1925,2.294,1926,1.283,1927,1.283,1928,1.283,1929,1.283,1930,1.283,1931,1.283,1932,1.283,1933,1.283,1934,2.294,1935,1.283,1936,2.294,1937,2.294,1938,1.283,1939,2.294,1940,1.283,1941,1.283,1942,1.283,1943,1.283,1944,1.283,1945,1.283,1946,2.294,1947,1.283,1948,3.112,1949,2.294,1950,2.294,1951,1.283,1952,1.283,1953,1.283,1954,1.283,1955,1.283,1956,1.283,1957,1.283,1958,1.283,1959,1.283,1960,1.283,1961,2.294,1962,2.294,1963,1.283,1964,1.283,1965,1.283,1966,1.283,1967,3.112,1968,1.283,1969,2.294,1970,1.283,1971,1.283,1972,3.112,1973,2.294,1974,1.283,1975,2.294,1976,2.294,1977,2.294,1978,2.2
94,1979,1.283,1980,1.283,1981,3.112,1982,3.787,1983,4.353,1984,1.283,1985,2.294,1986,1.283,1987,1.283,1988,1.283,1989,1.283,1990,1.283,1991,1.283,1992,1.283,1993,1.283,1994,1.283,1995,1.283,1996,1.283,1997,1.283,1998,1.283,1999,1.283,2000,1.283,2001,3.787,2002,1.283,2003,1.283,2004,1.283,2005,1.283,2006,1.15,2007,1.283,2008,1.283,2009,1.283,2010,1.283,2011,1.283,2012,1.283,2013,1.283,2014,1.283,2015,1.283,2016,1.283,2017,1.283,2018,1.283,2019,2.294,2020,2.294,2021,2.294,2022,2.294,2023,2.294,2024,1.283,2025,1.283,2026,1.283,2027,1.283,2028,1.283,2029,1.283,2030,1.283,2031,1.283,2032,1.283,2033,1.283,2034,1.283,2035,1.283,2036,1.283,2037,1.283,2038,1.283]],["t/394",[14,2.742,2039,8.032]],["t/398",[0,0.372]],["t/400",[0,0.443,14,2.556,85,3.844,226,4.147,541,4.531,633,3.595,1709,3.831,1895,4.147,2040,4.627,2041,4.627,2042,4.627,2043,4.627,2044,4.627,2045,4.627,2046,4.627,2047,4.627,2048,4.627,2049,7.488,2050,4.627,2051,4.627,2052,4.627,2053,4.627,2054,4.627]],["t/402",[15,2.356,2055,7.887,2056,7.887]],["t/405",[0,0.439,14,1.122,15,1.279,78,1.604,79,1.361,80,1.505,81,1.604,82,2.722,83,4.381,84,1.604,85,3.535,86,1.604,87,2.832,88,1.604,89,1.604,90,1.604,91,1.604,92,1.604,93,3.269,94,1.361,95,1.604,96,2.722,97,1.604,98,1.604,99,1.604,100,3.66,101,1.604,102,1.604,103,1.604,104,1.604,105,1.361,106,1.604,107,1.604,108,3.153,109,1.604,110,2.722,111,1.604,112,1.604,113,1.604,114,1.604,115,1.604,116,1.604,117,1.604,118,2.554,119,1.604,120,2.722,121,1.604,122,2.722,123,1.604,124,1.604,125,1.604,126,1.604,127,2.214,128,1.604,129,1.604,130,3.546,131,3.071,132,1.427,133,2.722,134,1.427,135,1.604,136,3.804,137,2.722,138,2.722,139,1.604,140,1.604,141,1.604,142,1.604,143,1.604,144,1.604,145,1.604,146,1.604,147,1.604,148,1.604,149,1.604]],["t/409",[0,0.466,15,2.237,83,4.166,100,5.259,2057,4.627,2058,4.627,2059,4.627,2060,4.627,2061,4.627,2062,4.627,2063,4.627]],["t/411",[0,0.334,2064,7.355,2065,7.355,2066,7.355,2067,7.355,2068,7.355,2069,7.355]],["t/413",[0,0.419]],["t/415",[453,5.747]],["t/417",[0,0.358,2070,7.887,2071,7.887]],["t/419",[2072,7.887,2073,7.887,2074,6.128]],["t/422",[0,0.34,132,5.508,426,5.813,2074,6.879,2075,7.481]],["t/424",[0,0.442,14,1.832,17,3.247,61,3.769,2006,4.81,2074,6.286,2076,4.81,2077,5.366,2078,5.366,2079,7.251,2080,5.366,2081,5.366,2082,4.81,2083,6.435,2084,5.366,2085,5.366]],["t/426",[0,0.389,14,2.647,17,3.651,2074,6.656,2076,5.408,2079,6.949,2082,5.408,2083,5.408,2086,6.034,2087,6.034,2088,6.034,2089,6.034,2090,6.034,2091,6.034]],["t/428",[0,0.372]],["t/430",[0,0.437,5,4.344,18,4.652,252,6.435,2092,5.366,2093,4.81,2094,8.09,2095,5.366,2096,5.366,2097,5.366,2098,4.81,2099,8.09,2100,5.366,2101,5.366,2102,5.366]],["t/432",[0,0.34,432,6.706,2103,7.481,2104,7.481,2105,7.481,2106,7.481]],["t/434",[0,0.41,2093,6.944,2098,6.944]],["t/437",[1902,6.276,2107,7.001,2108,7.001,2109,7.001,2110,7.001,2111,7.001,2112,7.001,2113,7.001,2114,7.001,2115,6.276]],["t/439",[0,0.344,36,3.32,500,3.673,952,4.237,2115,4.237,2116,4.727,2117,4.727,2118,7.575,2119,6.584,2120,4.727,2121,4.727,2122,4.727,2123,4.727,2124,4.727,2125,4.727,2126,4.727,2127,4.727,2128,4.727,2129,4.727,2130,4.727,2131,6.584,2132,4.727,2133,6.584,2134,6.584,2135,7.575,2136,4.727,2137,4.727,2138,4.727,2139,4.727,2140,4.727]],["t/442",[2141,8.182]],["t/444",[0,0.405,189,3.748,305,3.154,382,5.897,2142,6.579,2143,8.916,2144,6.579,2145,6.579,2146,6.579,2147,6.579]],["t/446",[0,0.365,412,7.199]],["t/448",[453,5.747]],["t/450",[0,0.403,27,3.141,69,3.532,83,2.19,461,3.823,591,3.314,861,3.314,883,3.141,1344,3.314,1709,3.532,2148,4.266,2149,6.1
22,2150,4.266,2151,4.266,2152,4.266,2153,4.266,2154,4.266,2155,4.266,2156,7.825,2157,7.825,2158,7.825,2159,7.825,2160,6.122,2161,4.266,2162,6.122,2163,4.266,2164,4.266,2165,4.266,2166,4.266]],["t/452",[0,0.415,2167,7.887]],["t/454",[0,0.365,2168,8.032]],["t/456",[0,0.452,194,4.692,2169,6.68,2170,8.266,2171,6.68,2172,6.68]],["t/458",[0,0.419]],["t/460",[0,0.419]],["t/462",[453,5.747]]],"invertedIndex":[["",{"_index":0,"t":{"2":{"position":[[0,7],[18,1],[55,1],[68,1],[146,1],[330,1],[375,1],[460,1],[478,1],[482,1],[490,1],[596,1],[614,1],[618,1],[626,1],[734,1],[847,1]]},"4":{"position":[[10,1],[47,1],[60,1],[138,1],[322,1],[367,1],[452,1],[470,1],[474,1],[482,1],[588,1],[606,1],[610,1],[618,1],[726,1],[839,1]]},"6":{"position":[[19,4],[61,19],[113,1],[213,1],[297,5],[310,5],[328,5]]},"9":{"position":[[322,18],[341,23],[439,7],[447,11],[459,4],[464,19]]},"11":{"position":[[0,2],[3,1],[5,9],[15,36],[52,28]]},"13":{"position":[[0,36],[37,28]]},"15":{"position":[[0,7],[18,1],[55,1],[68,1],[146,1],[330,1],[375,1],[460,1],[478,1],[482,1],[490,1],[596,1],[614,1],[618,1],[626,1],[734,1],[847,1]]},"17":{"position":[[10,1],[47,1],[60,1],[138,1],[322,1],[367,1],[452,1],[470,1],[474,1],[482,1],[588,1],[606,1],[610,1],[618,1],[726,1],[839,1]]},"19":{"position":[[19,4],[61,19],[113,1],[213,1],[297,5],[310,5],[328,5]]},"21":{"position":[[0,9],[97,1],[116,1],[135,1],[155,1],[174,1],[194,1],[223,1],[281,1],[307,1],[337,1],[402,1],[499,1],[532,1],[538,1],[565,1],[570,1],[603,1],[608,1],[620,3],[624,1],[639,1],[728,1],[776,1],[778,8],[789,1],[876,1],[907,2],[944,1],[1059,1],[1087,1],[1115,1],[1148,3],[1221,3],[1227,1],[1268,1],[1309,1],[1342,3],[1424,3]]},"23":{"position":[[87,1],[106,1],[125,1],[145,1],[164,1],[184,1],[213,1],[271,1],[297,1],[327,1],[392,1],[489,1],[522,1],[528,1],[555,1],[560,1],[593,1],[598,1],[610,3],[614,1],[629,1],[718,1],[766,1],[768,8],[779,1],[866,1],[897,2],[934,1],[1049,1],[1077,1],[1105,1],[1138,3],[1211,3],[1217,1],[1258,1],[1299,1],[1332,3],[1414,3]]},"27":{"position":[[123,1],[159,1],[184,1],[200,1],[220,2],[232,1],[256,9],[266,1],[282,1],[292,1],[315,1],[347,2],[374,1],[415,1],[417,8],[441,1],[472,1],[479,1],[493,1],[495,1],[497,1],[499,1],[501,1],[572,1],[630,4],[635,2],[638,1],[666,1],[736,1],[805,1],[863,1],[881,1],[883,1],[885,1],[936,1],[1002,2],[1335,1],[1371,1],[1396,1],[1412,1],[1432,2],[1444,1],[1468,9],[1478,1],[1494,1],[1504,1],[1527,1],[1559,2],[1586,1],[1656,1],[1687,1],[1694,1],[1708,1],[1710,1],[1712,1],[1714,1],[1716,1],[1718,2],[1721,1],[1750,1],[1803,1],[1830,2],[1871,1],[2145,2],[2254,1],[2312,2],[2340,2],[2352,1]]},"29":{"position":[[67,2],[225,15],[365,1],[393,1],[514,1],[516,14],[587,1],[589,14],[718,1],[747,1],[822,1],[849,1],[907,1],[967,1],[1024,13],[1053,36]]},"33":{"position":[[0,23],[24,25],[50,23],[74,6],[81,4],[86,6],[93,17]]},"35":{"position":[[0,20],[21,36],[58,13],[72,22]]},"37":{"position":[[0,14]]},"44":{"position":[[72,31],[277,5],[283,5]]},"46":{"position":[[0,57]]},"50":{"position":[[74,2],[233,3],[246,2],[368,11],[380,7],[401,5],[407,2],[410,28],[439,39]]},"54":{"position":[[33,44],[78,47]]},"57":{"position":[[0,2],[54,19],[74,13]]},"59":{"position":[[0,2]]},"62":{"position":[[0,2],[3,19]]},"66":{"position":[[0,10],[11,2],[14,31],[46,5],[52,2],[55,20],[76,7],[84,7],[92,15]]},"69":{"position":[[0,37],[38,2],[41,23],[65,30]]},"71":{"position":[[0,7],[8,20],[39,13]]},"74":{"position":[[0,15],[16,43],[60,15],[76,22]]},"76":{"position":[[12,9],[22,24],[90,2],[93,4],[107,1],[270,1],[444,1],[631,1]]},"79":{"position":[[0,61],[62,42],[105,56],[162
,39],[202,32],[235,47]]},"81":{"position":[[0,40],[41,61],[135,2],[173,7],[181,4],[207,13]]},"83":{"position":[[0,42],[45,5],[51,32],[86,5],[92,20]]},"85":{"position":[[0,46],[49,5],[71,4],[78,5],[84,54]]},"87":{"position":[[0,27],[28,6],[35,17],[53,22]]},"91":{"position":[[28,14],[43,16],[60,7],[68,11],[80,8],[195,10]]},"95":{"position":[[19,9],[29,9],[39,7]]},"97":{"position":[[0,9],[10,20]]},"99":{"position":[[49,5],[105,17],[123,19],[143,19]]},"101":{"position":[[0,6],[167,16],[184,8],[258,58],[317,3],[354,5],[441,17],[459,2],[487,2],[514,2],[517,4],[538,2],[557,3],[578,5],[633,1],[702,2],[733,1],[799,1],[862,1],[929,1],[970,2],[982,1],[1060,2],[1097,3],[1143,5],[1198,1],[1234,2],[1259,2],[1287,2],[1290,4],[1308,2],[1421,1],[1449,1],[1570,1],[1572,14],[1643,1],[1645,14],[1774,1],[1803,1],[1878,1],[1905,1],[1963,1],[2023,1],[2080,5],[2086,12],[2099,16],[2116,15]]},"112":{"position":[[56,10],[67,14],[82,15],[98,13]]},"114":{"position":[[0,2],[3,14]]},"118":{"position":[[0,2]]},"120":{"position":[[0,2]]},"122":{"position":[[0,31]]},"124":{"position":[[39,1],[41,2],[44,1],[46,23],[70,2],[90,2],[93,1],[103,1],[108,2],[111,17],[143,1],[148,2],[151,2],[163,1],[179,1],[191,1],[193,2],[196,13],[215,1],[227,1],[232,1],[250,2],[276,2],[288,1],[290,2],[347,1],[357,1],[375,1],[382,1],[393,1],[400,1],[402,2],[405,9],[438,1],[440,2],[502,1],[514,1],[522,2],[525,26],[552,1],[561,2],[564,6],[571,1],[595,1],[597,2],[648,1],[667,2],[670,6],[677,1]]},"126":{"position":[[0,14],[15,2],[32,7],[125,1],[127,2],[130,1],[132,23],[156,2],[176,2],[179,1],[189,1],[194,2],[197,17],[229,1],[234,2],[237,2],[249,1],[265,1],[277,1],[279,2],[282,13],[301,1],[313,1],[318,1],[336,2],[362,2],[374,1],[376,2],[433,1],[443,1],[461,1],[468,1],[479,1],[486,1],[488,2],[491,9],[524,1],[526,2],[588,1],[600,1],[608,2],[611,26],[638,1],[647,2],[650,6],[657,1],[681,1],[683,2],[734,1],[753,2],[756,6],[763,1],[776,1],[799,2],[805,1],[832,1],[853,2],[861,1],[877,1],[898,1],[931,1],[953,1],[965,1],[984,1],[991,2],[999,1],[1006,1],[1021,1],[1040,2],[1061,1],[1068,2],[1076,1],[1078,1],[1085,2],[1094,1],[1106,1]]},"132":{"position":[[16,2],[135,2],[138,15],[154,30],[185,6]]},"134":{"position":[[162,10],[173,6],[180,6],[187,12],[200,4]]},"136":{"position":[[0,31],[81,48]]},"141":{"position":[[0,2],[518,2],[598,20],[719,2],[722,14],[786,10]]},"143":{"position":[[11,7]]},"145":{"position":[[3,5],[62,24],[87,33],[154,2],[197,4],[202,28]]},"147":{"position":[[0,3],[82,3]]},"155":{"position":[[134,38]]},"159":{"position":[[146,6]]},"161":{"position":[[0,48],[105,15],[121,91]]},"166":{"position":[[3,7],[47,7]]},"168":{"position":[[3,7],[47,7]]},"171":{"position":[[0,5],[6,2],[11,6],[22,2],[39,14],[54,7],[62,9],[74,5],[86,1],[91,1],[95,8],[104,7],[112,18],[131,41],[173,43],[217,45]]},"173":{"position":[[0,5],[6,4],[11,30],[42,6],[49,40],[107,4],[112,1],[114,1]]},"175":{"position":[[0,2],[3,5],[9,8],[18,12],[31,7],[39,4]]},"177":{"position":[[0,51],[52,16]]},"179":{"position":[[0,12],[13,4],[18,10],[29,12],[42,9],[52,63],[116,2],[119,14],[134,13],[148,15],[232,20],[253,2],[256,72],[329,13],[343,43],[387,68],[456,87],[544,14],[559,9],[569,49],[619,12],[632,28],[661,14],[787,55]]},"182":{"position":[[0,7],[8,74],[83,9],[93,90],[184,9],[260,10],[271,46],[318,5],[324,36],[361,16],[378,16],[395,8],[404,33],[467,1],[482,1],[519,3],[552,9],[564,12],[603,9],[615,11],[660,3],[666,10],[677,41],[736,2],[763,5]]},"185":{"position":[[0,51],[52,16],[128,12],[141,4],[146,10],[157,12],[170,9],[180,63],[244,2],[247,14],[262,13],[276,15],[360,20],[381,2],[384,72],[457,
13],[471,43],[515,68],[584,87],[672,14],[687,9],[697,49],[747,12],[760,28],[789,14],[915,55]]},"187":{"position":[[0,7],[8,74],[83,9],[93,90],[184,9],[260,10],[271,46],[318,5],[324,36],[361,16],[378,16],[395,8],[404,33],[467,1],[482,1],[519,3],[552,9],[564,12],[603,9],[615,11],[660,3],[666,10],[677,41],[736,2],[763,5]]},"189":{"position":[[0,2],[3,14]]},"193":{"position":[[0,2]]},"195":{"position":[[57,1],[70,1],[80,2],[94,1],[102,1],[106,1],[113,1],[127,1],[132,1],[136,1],[140,1],[145,1],[149,1],[157,2],[164,1],[172,2],[178,1],[185,2],[190,2],[199,1],[201,1],[213,1]]},"197":{"position":[[0,20],[94,1],[99,1],[103,1],[107,1],[112,1],[116,1],[122,2],[125,5],[133,2],[140,1],[165,1],[181,1],[188,1],[205,1],[211,1],[217,1],[222,1],[230,1]]},"199":{"position":[[0,2],[3,93],[97,5],[263,5]]},"201":{"position":[[57,1],[131,1],[139,2],[144,2],[149,2],[155,1],[164,1],[179,2],[182,6],[198,1],[204,1],[206,2],[209,6],[216,1],[218,2],[221,5],[238,1],[245,1],[267,1],[279,2],[297,1],[352,1],[364,2],[369,2],[376,2],[382,1],[391,1],[404,1],[415,2],[428,1],[457,1],[459,1],[471,1]]},"203":{"position":[[0,47]]},"205":{"position":[[0,2]]},"207":{"position":[[57,1],[73,2],[88,1],[98,1],[132,1],[142,1],[158,1],[165,1],[182,1],[188,1],[194,1],[199,1],[213,1],[223,1],[228,2],[234,1],[241,1],[251,1],[269,1],[271,1],[328,2],[334,1],[341,2],[349,2],[368,1],[375,1],[382,2],[388,2],[397,1],[399,1]]},"209":{"position":[[19,19],[71,8],[83,1],[98,1]]},"213":{"position":[[21,2],[58,2],[91,2],[125,2],[128,2],[142,2],[145,4],[160,2],[163,5],[184,2],[187,8],[210,2],[213,7],[221,2],[269,1],[279,2],[299,2]]},"215":{"position":[[19,1],[44,2],[47,7],[67,2],[70,7],[78,2]]},"217":{"position":[[9,1],[31,2],[57,2],[60,6],[78,2],[81,2],[86,2],[101,2],[119,2],[128,2],[131,21]]},"219":{"position":[[46,2],[49,6],[67,2],[70,6],[87,2],[90,6],[106,2],[109,6]]},"221":{"position":[[0,2],[3,6],[77,2],[80,10],[94,4],[102,4],[122,2],[125,5]]},"227":{"position":[[12,2],[15,7],[23,2],[60,2],[63,7],[84,5],[133,2],[171,2],[207,2],[214,6],[238,2],[271,2]]},"229":{"position":[[15,2],[62,1],[241,12]]},"231":{"position":[[28,2],[31,10],[42,2],[45,7],[53,2],[56,2],[59,2],[62,2],[65,2],[84,2],[104,2],[122,2],[139,2],[160,2],[180,2],[191,6]]},"233":{"position":[[0,2],[3,12]]},"238":{"position":[[0,88],[89,62]]},"240":{"position":[[0,115],[116,80],[197,64],[262,47],[310,50]]},"242":{"position":[[0,78],[79,65]]},"246":{"position":[[5,1],[9,4],[23,7],[40,8],[54,1],[62,1],[66,6],[77,4],[87,1],[93,4],[103,1],[109,1],[113,4],[123,1],[129,1],[134,4],[144,1],[149,4],[159,1],[169,1],[173,7],[186,1],[196,1],[200,10],[216,1],[220,3],[229,1],[237,1],[241,3],[248,4],[258,1],[263,5],[273,6],[284,6]]},"249":{"position":[[0,3],[4,2],[7,3],[11,2],[14,3],[18,2],[30,4],[43,6],[58,6],[73,4],[87,4],[101,5],[114,4],[128,5],[143,4],[157,6],[173,6],[203,5],[218,8],[234,4]]},"251":{"position":[[21,3],[25,2],[28,3],[32,2],[35,3],[39,2],[52,4],[63,4],[75,2],[87,3],[97,1],[103,6],[115,2],[125,3],[133,2],[142,2],[165,4]]},"255":{"position":[[0,10],[39,1],[45,1],[75,1],[82,1],[88,1],[95,4],[194,11],[255,7],[344,7],[430,6],[527,1],[557,1],[600,1],[638,1],[675,1],[681,1],[691,1],[697,1],[723,1],[725,1],[768,1],[770,1]]},"258":{"position":[[18,1],[71,2]]},"260":{"position":[[0,10],[34,1],[36,1],[70,12],[118,1],[137,1],[160,1],[186,1],[237,1],[239,1],[301,2],[304,9],[314,8],[345,1],[398,15],[423,1],[457,2],[476,1],[511,2],[514,1],[561,1],[587,8],[610,8],[631,10],[667,1],[695,1],[716,1],[726,1],[750,1],[771,1],[773,8],[813,1],[815,3],[819,3],[823,3],[840,1],[842,1]]},"262":{"position":[[77,1]]
},"265":{"position":[[7,1],[18,35],[61,1],[128,1]]},"267":{"position":[[0,6],[24,1],[39,19],[71,1],[86,1],[114,1]]},"269":{"position":[[23,1],[51,1],[58,1],[67,3],[71,1],[78,1],[94,1]]},"271":{"position":[[0,3]]},"273":{"position":[[17,4],[31,9],[41,9],[58,18],[205,1],[222,1],[224,3],[496,17],[525,17],[549,17],[579,17],[739,7],[773,39],[837,1]]},"275":{"position":[[13,30],[44,11],[56,2],[59,2],[121,22],[156,20],[183,15],[208,15],[230,10],[246,15],[344,15],[377,16],[403,15],[428,18],[460,22],[555,10],[566,2],[569,2],[577,17],[609,20],[643,23],[679,2],[684,9],[699,14],[721,8],[732,9],[751,34],[797,20],[820,4],[868,17],[905,20],[941,15],[964,18],[993,18],[1024,25],[1065,25],[1099,22],[1135,29],[1181,29],[1222,22],[1253,12],[1279,13],[1300,12],[1325,13],[1352,24],[1382,8],[1403,9],[1423,24],[1455,35],[1503,16],[1530,5],[1536,2],[1539,2],[1600,10],[1631,11],[1664,11],[1693,16],[1710,5],[1716,2],[1719,2],[1737,11],[1770,10],[1793,28],[1835,4],[1858,9],[1873,2],[1888,4],[1893,8],[1902,2],[1905,2],[1922,19],[1954,20],[1987,26],[2026,23],[2050,5],[2056,2],[2059,2],[2071,17],[2103,4],[2114,2],[2132,9],[2159,9],[2194,10],[2215,9],[2225,4],[2230,2],[2233,2],[2244,13],[2277,18],[2306,11],[2327,12],[2350,9],[2370,11],[2382,8],[2391,2],[2394,2],[2417,19],[2461,10],[2476,13],[2502,13],[2520,13],[2546,13],[2572,16],[2609,16],[2643,23],[2684,23]]},"277":{"position":[[274,2],[323,2],[377,2],[451,2],[454,15],[480,2],[483,5],[498,2],[501,5],[522,2],[543,2],[546,4],[561,2],[564,14],[592,2],[595,6],[612,2],[615,15],[639,2],[642,21],[682,2],[778,2],[843,2],[933,2],[971,2],[1043,2],[1106,2],[1109,10],[1130,2],[1174,2],[1242,2],[1298,2],[1335,2],[1394,2],[1397,13],[1411,2],[1414,1],[1416,23],[1440,2],[1513,1],[1547,2],[1553,1],[1571,1],[1584,1],[1602,2],[1611,1],[1613,1],[1620,2],[1628,2],[1631,1],[1633,2],[1636,1],[1643,2],[1662,1]]},"279":{"position":[[219,2],[222,15],[246,2],[249,21],[287,2],[314,2],[339,2],[342,6],[363,2],[366,6],[382,2],[385,4],[400,2],[403,14],[430,2],[502,2],[565,2],[568,10],[589,2],[592,5],[607,2],[610,5],[626,2],[657,2],[703,2],[799,2],[864,2],[953,2],[987,2]]},"281":{"position":[[329,8],[338,3],[342,2],[354,8],[676,5],[682,4],[687,2],[701,4],[713,10],[724,3],[749,4],[763,4],[776,4],[789,6],[805,6],[822,4],[827,1],[829,5],[861,7],[885,11],[908,11],[928,9],[949,13],[973,17],[991,1],[1000,6],[1033,2],[1104,12],[1126,8],[1149,10],[1176,9]]},"283":{"position":[[149,2],[160,2]]},"285":{"position":[[336,2],[349,2],[359,1],[368,2],[519,2],[664,2],[667,4],[672,2],[681,1],[691,2],[783,1],[798,2],[892,2],[895,4],[900,2],[917,2],[965,2],[1001,2],[1004,4],[1009,2],[1022,2],[1025,6],[1043,2],[1046,6],[1064,2],[1067,6]]},"287":{"position":[[179,2],[182,2],[194,2],[197,2],[209,2],[212,4],[228,2],[253,2],[256,8]]},"289":{"position":[[184,2],[187,3],[200,2],[203,3],[218,2],[221,6],[238,2],[241,6],[259,2],[262,8],[281,2],[284,10]]},"291":{"position":[[124,2],[127,13],[151,2],[154,6],[171,2],[174,10],[195,2],[223,2],[226,25],[268,2],[271,21],[303,2],[306,8],[324,2],[327,16],[362,2],[365,8],[385,2],[388,8],[413,2],[416,23],[453,2],[456,15],[485,2],[488,15],[515,2],[518,19],[547,2],[550,18],[578,2],[581,8],[599,2],[602,8],[627,2],[630,13],[659,2],[662,16]]},"295":{"position":[[11,1],[20,5],[89,11],[108,24],[146,24],[171,2],[187,8],[211,1],[234,2],[240,2],[248,4],[255,1],[298,2],[305,7],[332,1],[336,5],[367,2],[374,6],[402,1],[406,5],[443,2],[446,1],[452,1],[456,5],[498,1],[503,3],[509,1],[515,4],[522,4],[544,5],[559,2],[565,7],[575,6],[592,2],[598,6],[607,7],[626,2],[632,3],[638,8],[657,2],[663,8],[680,2],[683,
2],[689,2],[696,6],[716,2],[722,2],[729,9],[750,2],[753,1],[758,10],[783,2],[786,1],[791,2],[798,8],[820,2],[823,1],[828,10],[855,2],[858,1],[888,2],[891,1],[896,11],[921,2],[924,1],[929,2],[936,8],[960,2],[963,2],[980,2],[983,1],[988,12],[1015,1],[1020,2],[1026,2],[1029,1],[1034,7],[1045,1]]},"298":{"position":[[97,1],[103,1],[109,1],[115,1],[117,8],[148,1],[182,15],[207,1],[216,2],[235,1],[245,2],[248,1],[295,1],[321,8],[344,8],[365,10],[401,1],[429,1],[450,1],[452,8],[492,1],[514,1],[558,1],[600,1],[607,1],[689,1],[704,1],[706,8],[742,1],[840,1],[845,1],[854,1],[911,1],[918,1],[956,1],[966,1],[1018,2],[1024,1],[1065,1],[1067,1],[1069,1],[1083,1],[1085,8],[1121,1],[1188,2],[1197,1],[1199,2],[1208,2],[1211,1],[1213,2],[1288,1],[1293,1],[1302,1],[1325,1],[1344,1],[1351,1],[1389,1],[1399,1],[1451,2],[1457,1],[1525,2],[1534,1],[1536,2],[1543,2],[1546,1],[1548,2],[1557,1],[1572,1],[1574,1],[1588,1],[1598,1],[1632,1],[1647,2],[1655,1],[1657,2],[1664,2],[1667,1],[1669,2],[1686,1],[1688,1],[1697,2],[1718,1],[1720,1],[1740,1],[1946,1],[1963,2],[1966,14],[1991,1],[1998,1],[2021,2],[2030,2],[2033,1],[2035,2],[2043,2],[2057,2],[2060,14],[2257,1],[2280,1],[2287,1],[2313,2],[2325,2],[2328,1],[2330,2],[2338,2],[2360,2],[2377,1]]},"300":{"position":[[99,1],[105,1],[111,1],[117,1],[189,8],[220,1],[273,15],[298,1],[332,2],[351,1],[386,2],[389,1],[436,1],[462,8],[485,8],[506,10],[542,1],[570,1],[591,1],[601,1],[625,1],[646,1],[648,8],[688,1],[710,1],[754,1],[847,1],[854,1],[1038,1],[1053,1],[1055,1],[1137,2],[1175,1],[1205,1],[1338,1],[1340,1],[1419,1],[1501,1],[1649,1],[1751,1],[1899,1],[1901,1],[2126,1],[2128,1],[2194,1],[2278,1],[2337,1],[2374,1],[2416,1],[2469,1],[2502,1],[2514,1]]},"302":{"position":[[234,5],[259,5],[310,1],[329,1],[424,8],[448,4],[485,3],[580,4],[660,2],[693,10],[728,2],[764,1],[766,5],[790,1],[806,1],[865,1],[911,1],[955,1],[1004,1],[1025,2],[1031,1],[1043,1],[1045,1],[1058,1],[1092,1],[1094,11],[1106,11],[1118,5],[1124,7],[1132,7],[1267,6],[1283,1],[1289,6],[1347,1],[1352,6],[1410,1],[1415,7],[1495,6],[1531,1],[1553,1],[1565,1],[1577,1],[1603,1],[1635,2],[1659,1],[1771,1],[1773,7],[1788,1],[1832,1],[1834,8],[1851,1],[1856,1],[1863,1],[1865,6],[1893,1],[1898,1],[1900,1],[1913,1],[1915,2],[1988,3],[2004,1],[2070,2],[2100,7],[2151,6],[2184,4],[2204,10],[2262,1],[2328,7],[2368,1],[2370,5],[2427,1],[2451,1],[2477,5],[2494,8],[2503,1],[2510,1],[2563,1],[2621,9],[2631,1],[2633,1],[2635,2],[2638,8],[2647,2],[2650,11],[2663,15],[2702,1],[2715,1],[2795,2],[2868,1],[2897,1],[2926,1],[3030,4],[3038,1],[3109,1],[3132,1],[3160,1],[3198,1],[3200,1],[3212,1]]},"305":{"position":[[197,1],[218,2],[221,10],[278,2],[307,1],[309,2],[357,7],[400,2],[403,13],[456,2],[459,11],[523,2],[526,15],[584,2],[587,15],[614,1],[616,2],[619,10],[676,2],[705,1],[742,2],[745,14],[799,2],[802,11],[866,2],[869,15],[927,2],[930,15],[957,1],[959,2],[962,3],[988,1],[1063,1],[1065,2],[1068,4],[1090,1],[1151,1],[1255,1],[1333,3],[1337,2],[1340,9],[1350,1],[1352,2],[1355,4],[1378,1],[1465,1],[1467,2],[1470,4],[1493,1],[1580,1],[1582,2],[1585,4],[1609,1],[1698,1],[1711,1],[1743,1],[1795,2],[1798,4],[1803,2],[1847,2],[1850,4],[1897,2],[1900,4],[1939,2],[1942,9],[1952,2],[1989,2],[1992,9],[2011,2],[2014,4],[2019,2],[2032,2],[2035,4],[2040,2],[2053,2],[2067,2],[2070,4],[2075,2],[2088,2],[2101,2],[2104,4],[2109,2],[2122,1],[2134,1]]},"307":{"position":[[197,1],[264,1],[445,1],[493,1],[674,1],[698,1],[773,1],[792,1],[853,1],[957,1],[1035,3],[1039,1],[1059,1],[1146,1],[1166,1],[1253,1],[1274,1],[1363,1],[1376,1],[1408,1],[1460,2],[15
80,2],[1626,2],[1639,2],[1653,2],[1666,1],[1678,1]]},"309":{"position":[[277,1],[340,2],[350,1],[537,1],[558,1],[570,2],[652,1],[665,1],[827,1]]},"312":{"position":[[0,24],[25,23],[49,5],[55,3]]},"314":{"position":[[0,26],[179,20],[495,17],[801,16]]},"316":{"position":[[0,31],[261,10],[341,1]]},"320":{"position":[[24,1],[37,1],[116,1],[307,1],[352,1]]},"322":{"position":[[22,1],[40,1],[44,1],[52,1],[158,1],[176,1],[180,1],[188,1],[296,1]]},"325":{"position":[[37,19],[89,1],[189,1]]},"327":{"position":[[0,4],[317,2],[353,2],[356,2],[707,2],[719,2],[743,2],[777,2],[803,2],[817,2],[832,2],[880,2],[893,1],[948,2],[1001,1],[1033,1],[1064,1],[1121,4],[1176,2],[1228,2],[1236,1],[1253,2],[1266,2],[1269,2],[1345,2],[1442,1],[1454,1],[1496,1],[1518,1],[1541,1],[1588,1],[1638,1],[1685,1],[1710,1],[1744,1],[1776,1],[1821,1],[1871,1]]},"332":{"position":[[271,1],[494,3],[498,14],[513,32],[546,15]]},"334":{"position":[[0,2]]},"336":{"position":[[106,12],[162,1]]},"338":{"position":[[95,1]]},"340":{"position":[[107,1],[133,2],[136,6]]},"349":{"position":[[0,2],[33,74],[196,62],[393,4],[398,31]]},"351":{"position":[[662,4],[814,18],[833,2],[836,11],[848,5],[870,10],[881,5],[887,27],[915,5]]},"353":{"position":[[110,2],[126,57],[184,39]]},"355":{"position":[[46,18],[65,10],[99,5],[105,1],[107,5],[113,5],[174,2],[229,24],[362,7],[370,2],[403,1],[412,1],[414,26],[502,1],[508,2],[514,1],[663,5],[672,5],[896,1],[919,1],[966,1],[1016,1],[1127,1],[1160,1],[1206,1],[1356,2],[1398,2],[1601,2],[1647,2],[1715,2],[1778,1],[1784,2],[1822,1],[2029,2],[2075,2]]},"357":{"position":[[0,22],[23,12],[88,10],[236,1],[242,2],[260,1]]},"359":{"position":[[0,8],[9,1],[11,8],[20,1],[22,27],[113,1],[186,12],[348,1],[402,5],[414,3],[510,1],[512,32],[545,1],[547,10],[570,68],[639,72],[729,1],[740,41],[790,1],[798,2],[804,1],[806,1],[846,4],[851,11],[863,4],[868,34]]},"361":{"position":[[25,1],[27,1],[29,2],[230,5]]},"363":{"position":[[37,1],[64,1],[129,4],[182,3],[186,8],[510,63]]},"368":{"position":[[0,35]]},"372":{"position":[[89,38],[158,1],[306,1]]},"374":{"position":[[0,2],[3,8]]},"380":{"position":[[72,1],[161,15],[253,1],[297,1],[335,1],[452,1],[564,1]]},"383":{"position":[[139,16]]},"387":{"position":[[0,24],[25,14],[40,32]]},"392":{"position":[[197,1],[249,1],[503,1],[542,1],[771,1],[775,1],[1022,1],[1067,1],[1137,1],[1173,1],[1190,1],[1252,1],[1285,1],[1358,2],[1459,2],[1483,2],[1580,1],[1628,1],[1683,1],[1803,1],[1841,1],[1991,1],[2026,1],[2037,1],[2097,1],[2163,1],[2175,1],[2197,1],[2209,1],[2231,1],[2236,1],[2251,2],[2257,2],[2271,2],[2310,1],[2319,1],[2324,1],[2376,1],[2435,1],[2564,1],[2577,1],[2650,1],[2678,1],[2735,1],[2751,1]]},"398":{"position":[[0,150]]},"400":{"position":[[335,30],[498,38],[537,15],[566,1],[576,1],[593,1],[598,2],[610,1],[614,1],[619,1],[630,1],[634,1]]},"405":{"position":[[83,1],[102,1],[121,1],[141,1],[160,1],[180,1],[209,1],[267,1],[293,1],[323,1],[388,1],[485,1],[518,1],[524,1],[551,1],[556,1],[589,1],[594,1],[606,3],[610,1],[625,1],[714,1],[762,1],[764,8],[775,1],[862,1],[893,2],[930,1],[1041,1],[1069,1],[1097,1],[1130,3],[1203,3],[1209,1],[1250,1],[1291,1],[1324,3],[1406,3]]},"409":{"position":[[85,2],[88,35],[364,1],[368,18],[398,3],[402,8],[413,24],[442,19],[462,3],[466,48],[515,3],[519,51],[582,3],[586,8],[597,31],[631,7],[639,3],[643,14],[660,31],[692,3],[696,47],[746,14],[763,6],[772,19]]},"411":{"position":[[136,5]]},"413":{"position":[[0,2],[3,14]]},"417":{"position":[[0,2]]},"422":{"position":[[29,7]]},"424":{"position":[[30,1],[41,1],[43,3],[47,1],[181,1],[183,22],[206,1],[278,1]
,[353,1]]},"426":{"position":[[30,1],[152,1],[154,22]]},"428":{"position":[[0,18]]},"430":{"position":[[24,1],[26,4],[183,7],[191,1],[193,1],[195,4],[281,7],[289,1]]},"432":{"position":[[75,13]]},"434":{"position":[[0,16],[26,16]]},"439":{"position":[[148,12],[209,2],[330,5]]},"444":{"position":[[43,6],[56,2],[92,2]]},"446":{"position":[[0,2]]},"450":{"position":[[0,6],[65,1],[148,2],[151,9],[211,1],[412,2],[433,1]]},"452":{"position":[[0,2],[3,24]]},"454":{"position":[[21,20]]},"456":{"position":[[15,18],[67,13],[81,7],[89,10],[127,4],[132,15],[161,11]]},"458":{"position":[[0,7],[8,12]]},"460":{"position":[[0,2],[3,12]]}}}],["0",{"_index":14,"t":{"2":{"position":[[192,14],[476,1],[484,3],[499,3],[612,1],[620,3],[635,3]]},"4":{"position":[[184,14],[468,1],[476,3],[491,3],[604,1],[612,3],[627,3]]},"15":{"position":[[192,14],[476,1],[484,3],[499,3],[612,1],[620,3],[635,3]]},"17":{"position":[[184,14],[468,1],[476,3],[491,3],[604,1],[612,3],[627,3]]},"21":{"position":[[972,2],[996,3]]},"23":{"position":[[962,2],[986,3]]},"25":{"position":[[50,5]]},"44":{"position":[[0,17]]},"101":{"position":[[2156,5]]},"124":{"position":[[105,2],[145,2],[165,2],[244,5],[359,5],[443,33],[600,25]]},"126":{"position":[[191,2],[231,2],[251,2],[330,5],[445,5],[529,33],[686,25],[856,4],[893,4],[994,4],[1103,2]]},"195":{"position":[[115,2],[129,2],[210,2]]},"197":{"position":[[96,2],[167,2],[183,2]]},"199":{"position":[[160,24],[185,3],[256,2],[259,3]]},"201":{"position":[[86,3],[142,1],[152,2],[367,1],[379,2],[468,2]]},"203":{"position":[[48,22]]},"207":{"position":[[90,2],[100,2],[144,2],[160,2],[208,4],[231,2],[365,2]]},"231":{"position":[[114,7],[172,7]]},"269":{"position":[[53,2]]},"277":{"position":[[1605,2],[1659,2]]},"295":{"position":[[253,1],[273,1],[839,1]]},"298":{"position":[[842,2],[913,2],[1021,2],[1290,2],[1346,2],[1454,2],[1993,2],[2282,2],[2374,2]]},"300":{"position":[[2511,2]]},"302":{"position":[[92,1],[121,1],[792,2],[808,2],[3089,3],[3209,2]]},"305":{"position":[[382,2],[724,2],[1007,2],[2131,2]]},"307":{"position":[[283,2],[512,2],[717,2],[1675,2]]},"309":{"position":[[824,2]]},"320":{"position":[[169,14]]},"322":{"position":[[38,1],[46,3],[61,3],[174,1],[182,3],[197,3]]},"355":{"position":[[405,4],[669,2]]},"359":{"position":[[350,5]]},"372":{"position":[[308,1]]},"392":{"position":[[2260,1]]},"394":{"position":[[44,74]]},"400":{"position":[[568,1],[578,1],[601,1]]},"405":{"position":[[958,2],[982,3]]},"424":{"position":[[131,5]]},"426":{"position":[[56,5],[62,5]]}}}],["0.0007",{"_index":1850,"t":{"363":{"position":[[278,6]]}}}],["0.1",{"_index":122,"t":{"21":{"position":[[840,5],[864,4]]},"23":{"position":[[830,5],[854,4]]},"405":{"position":[[826,5],[850,4]]}}}],["0.2",{"_index":125,"t":{"21":{"position":[[869,6]]},"23":{"position":[[859,6]]},"405":{"position":[[855,6]]}}}],["0.25",{"_index":852,"t":{"260":{"position":[[188,5]]}}}],["0.3",{"_index":119,"t":{"21":{"position":[[816,5]]},"23":{"position":[[806,5]]},"405":{"position":[[802,5]]}}}],["0.553y^​=0.4561x1​−0.0007x2​+0.3251x3​+0.0009x4​+0.0001x5​−0.9142x6​−0.553",{"_index":1857,"t":{"363":{"position":[[343,74]]}}}],["0.7",{"_index":123,"t":{"21":{"position":[[846,4]]},"23":{"position":[[836,4]]},"405":{"position":[[832,4]]}}}],["0.8",{"_index":120,"t":{"21":{"position":[[828,4],[851,5]]},"23":{"position":[[818,4],[841,5]]},"405":{"position":[[814,4],[837,5]]}}}],["0.88",{"_index":124,"t":{"21":{"position":[[857,6]]},"23":{"position":[[847,6]]},"405":{"position":[[843,6]]}}}],["0.9",{"_index":118,"t":{"21":{"position":[[811,4],[822,5]
]},"23":{"position":[[801,4],[812,5]]},"392":{"position":[[2737,4]]},"405":{"position":[[797,4],[808,5]]}}}],["0.9142",{"_index":1855,"t":{"363":{"position":[[330,6]]}}}],["0.99",{"_index":121,"t":{"21":{"position":[[833,6]]},"23":{"position":[[823,6]]},"405":{"position":[[819,6]]}}}],["01",{"_index":1197,"t":{"295":{"position":[[500,2]]}}}],["012",{"_index":1190,"t":{"295":{"position":[[370,3]]}}}],["0x3f3f3f3f",{"_index":814,"t":{"255":{"position":[[489,10]]},"309":{"position":[[131,10]]}}}],["0xf",{"_index":1186,"t":{"295":{"position":[[301,3]]}}}],["0xff;//d",{"_index":169,"t":{"27":{"position":[[294,8],[1506,8]]}}}],["0将通过从w",{"_index":1844,"t":{"363":{"position":[[25,11]]}}}],["0是手动调节的。我们把这个损失函数叫做l1。请注意,除了w",{"_index":1753,"t":{"355":{"position":[[373,29]]}}}],["0,计算w",{"_index":1824,"t":{"359":{"position":[[63,10]]}}}],["1",{"_index":15,"t":{"2":{"position":[[207,13],[624,1]]},"4":{"position":[[199,13],[616,1]]},"15":{"position":[[207,13],[624,1]]},"17":{"position":[[199,13],[616,1]]},"21":{"position":[[975,3],[1261,4],[1400,4]]},"23":{"position":[[965,3],[1251,4],[1390,4]]},"44":{"position":[[18,12]]},"50":{"position":[[77,57]]},"126":{"position":[[888,2]]},"134":{"position":[[134,16]]},"141":{"position":[[34,7]]},"143":{"position":[[0,2]]},"145":{"position":[[0,2]]},"149":{"position":[[70,2],[190,2]]},"151":{"position":[[84,2]]},"153":{"position":[[91,2]]},"159":{"position":[[100,2]]},"166":{"position":[[0,2]]},"168":{"position":[[0,2]]},"171":{"position":[[20,1],[25,13],[93,1],[263,37]]},"197":{"position":[[63,21]]},"201":{"position":[[200,2],[240,2],[293,3]]},"205":{"position":[[63,1]]},"207":{"position":[[243,2],[331,2],[386,1]]},"221":{"position":[[91,2]]},"231":{"position":[[76,7],[93,10]]},"255":{"position":[[742,3],[772,2],[775,2]]},"273":{"position":[[198,4]]},"277":{"position":[[1597,2]]},"295":{"position":[[264,8],[342,1],[573,1],[605,1],[636,1],[739,1],[769,1],[807,1]]},"298":{"position":[[213,2],[1315,3],[1611,3],[1630,1],[1774,3],[2109,3]]},"300":{"position":[[1645,3],[1747,3]]},"302":{"position":[[76,1],[105,1],[143,1],[1055,2],[2864,3]]},"305":{"position":[[862,3],[923,3],[1935,3],[1985,3]]},"307":{"position":[[617,3],[659,3],[1576,3],[1613,3]]},"309":{"position":[[370,2],[568,1],[797,3]]},"320":{"position":[[132,2],[184,13]]},"322":{"position":[[186,1]]},"327":{"position":[[1564,3],[1628,3],[1799,3]]},"340":{"position":[[87,1]]},"351":{"position":[[38,1]]},"355":{"position":[[500,1],[512,1],[942,3],[1183,3]]},"363":{"position":[[98,30]]},"392":{"position":[[404,2],[407,2],[2233,2],[2288,2],[2321,2],[2437,2]]},"402":{"position":[[22,20]]},"405":{"position":[[961,3],[1243,4],[1382,4]]},"409":{"position":[[362,1],[387,10],[411,1]]}}}],["1)&&(countout(1)==n",{"_index":1292,"t":{"300":{"position":[[1625,19]]}}}],["1))7label",{"_index":1776,"t":{"355":{"position":[[1006,9]]}}}],["1),indegree(0),outdegree(0",{"_index":862,"t":{"260":{"position":[[429,27]]},"300":{"position":[[304,27]]}}}],["1)[1,−1)的索引区间中的元素值都会加1,而对于某次刷漆终点e的下一个索引为e+1的元素值由于−1",{"_index":637,"t":{"203":{"position":[[193,51]]}}}],["1+max(getheight(t[rt].l),getheight(t[rt].r",{"_index":1481,"t":{"305":{"position":[[1017,45]]},"307":{"position":[[727,45]]}}}],["1,0",{"_index":830,"t":{"255":{"position":[[778,4]]}}}],["1,0,1,0,0,1,0",{"_index":828,"t":{"255":{"position":[[727,14]]}}}],["1,0,1,1",{"_index":832,"t":{"255":{"position":[[790,8]]}}}],["1,0,n",{"_index":1510,"t":{"305":{"position":[[1929,5],[1979,5]]},"307":{"position":[[1570,5],[1607,5]]},"309":{"position":[[791,5]]}}}],["1,1,0",{"_index":831,"t":{"2
55":{"position":[[783,6]]}}}],["1,1,0,1,1",{"_index":833,"t":{"255":{"position":[[799,11]]}}}],["1,l2+1,l2+p2",{"_index":1538,"t":{"309":{"position":[[475,14]]}}}],["1,lb+1,lb+p2",{"_index":1472,"t":{"305":{"position":[[508,14]]},"307":{"position":[[377,14]]}}}],["1,lb,lb+p2",{"_index":1477,"t":{"305":{"position":[[851,10]]},"307":{"position":[[606,10]]}}}],["1.0",{"_index":809,"t":{"255":{"position":[[454,4]]}}}],["1.51",{"_index":202,"t":{"27":{"position":[[876,4]]}}}],["1.vector",{"_index":910,"t":{"273":{"position":[[22,8]]}}}],["10",{"_index":23,"t":{"2":{"position":[[358,3],[762,3]]},"4":{"position":[[350,3],[754,3]]},"15":{"position":[[358,3],[762,3]]},"17":{"position":[[350,3],[754,3]]},"124":{"position":[[229,2]]},"126":{"position":[[315,2]]},"195":{"position":[[142,2],[151,3],[160,3]]},"197":{"position":[[109,2],[118,3],[136,3],[219,2]]},"207":{"position":[[196,2]]},"255":{"position":[[683,3]]},"260":{"position":[[121,3],[287,3]]},"265":{"position":[[144,33]]},"320":{"position":[[335,3]]},"322":{"position":[[324,3]]},"392":{"position":[[850,4],[2742,2]]}}}],["10,3.141590,\"method",{"_index":843,"t":{"260":{"position":[[38,19]]}}}],["10.multimap",{"_index":925,"t":{"273":{"position":[[567,11]]}}}],["100",{"_index":22,"t":{"2":{"position":[[348,4],[353,4],[752,4],[757,4]]},"4":{"position":[[340,4],[345,4],[744,4],[749,4]]},"15":{"position":[[348,4],[353,4],[752,4],[757,4]]},"17":{"position":[[340,4],[345,4],[744,4],[749,4]]},"101":{"position":[[687,4]]},"265":{"position":[[77,42]]},"302":{"position":[[281,3]]},"320":{"position":[[325,4],[330,4]]},"322":{"position":[[314,4],[319,4]]}}}],["1000",{"_index":616,"t":{"195":{"position":[[72,5]]}}}],["1000000007",{"_index":1520,"t":{"309":{"position":[[154,10]]}}}],["1010",{"_index":1191,"t":{"295":{"position":[[390,7],[412,4],[462,4]]}}}],["106",{"_index":416,"t":{"101":{"position":[[957,4]]}}}],["108",{"_index":1679,"t":{"336":{"position":[[164,3]]}}}],["10px",{"_index":2163,"t":{"450":{"position":[[367,5]]}}}],["10的vector,初始化为3",{"_index":680,"t":{"213":{"position":[[24,23]]}}}],["11",{"_index":1419,"t":{"302":{"position":[[2726,2]]}}}],["11.hash_set",{"_index":926,"t":{"273":{"position":[[597,11]]}}}],["1111",{"_index":1187,"t":{"295":{"position":[[321,6]]}}}],["11×1",{"_index":525,"t":{"136":{"position":[[150,18]]}}}],["11×1卷积核,每个filter对上一步的featur",{"_index":1686,"t":{"340":{"position":[[14,28]]}}}],["12",{"_index":812,"t":{"255":{"position":[[474,2]]},"295":{"position":[[381,8]]},"340":{"position":[[109,2]]}}}],["12.hash_multiset",{"_index":928,"t":{"273":{"position":[[629,16]]}}}],["120",{"_index":1955,"t":{"392":{"position":[[780,5]]}}}],["120,210都是30的倍数,由于要找最大的,所以答案是210",{"_index":645,"t":{"205":{"position":[[107,33]]}}}],["13.hash_map",{"_index":929,"t":{"273":{"position":[[666,11]]}}}],["1313×13个grid",{"_index":559,"t":{"143":{"position":[[75,12]]}}}],["14.hash_multimap",{"_index":930,"t":{"273":{"position":[[698,16]]}}}],["148",{"_index":433,"t":{"101":{"position":[[1237,8]]}}}],["149",{"_index":391,"t":{"101":{"position":[[462,8]]}}}],["16",{"_index":1952,"t":{"392":{"position":[[669,3]]}}}],["18446744073709551615ull",{"_index":821,"t":{"255":{"position":[[602,24]]}}}],["1?'\\n",{"_index":1492,"t":{"305":{"position":[[1324,8]]},"307":{"position":[[1026,8]]}}}],["1\\eta",{"_index":1911,"t":{"380":{"position":[[545,10]]}}}],["1\\eta=1η=1",{"_index":1809,"t":{"357":{"position":[[36,12]]}}}],["1][b,c,1,1]的tensor",{"_index":570,"t":{"149":{"position":[[73,43]]}}}],["1][b,c,1,1]的tensor,再送入共享的多层感知机网络进行降维再升维,最后将二者相加再经过sigmoid",{"_inde
x":574,"t":{"151":{"position":[[87,72]]}}}],["1][b,c,1,1]的tensor,该tensor",{"_index":588,"t":{"159":{"position":[[103,37]]}}}],["1],即reduce了dim=1",{"_index":116,"t":{"21":{"position":[[756,19]]},"23":{"position":[[746,19]]},"405":{"position":[[742,19]]}}}],["1_44=fd.img",{"_index":283,"t":{"29":{"position":[[629,12]]},"101":{"position":[[1685,12]]}}}],["1_44=fd_aug.img",{"_index":285,"t":{"29":{"position":[[668,16]]},"101":{"position":[[1724,16]]}}}],["1e",{"_index":811,"t":{"255":{"position":[[471,2]]}}}],["1e5",{"_index":824,"t":{"255":{"position":[[677,3]]}}}],["1e9",{"_index":826,"t":{"255":{"position":[[693,3]]}}}],["1import",{"_index":1771,"t":{"355":{"position":[[849,7]]}}}],["1k−1",{"_index":1708,"t":{"345":{"position":[[117,4]]}}}],["1min、5min",{"_index":2168,"t":{"454":{"position":[[0,20]]}}}],["1}a−1",{"_index":598,"t":{"177":{"position":[[122,5]]},"185":{"position":[[122,5]]}}}],["1}{n",{"_index":449,"t":{"110":{"position":[[300,5]]}}}],["1×11",{"_index":523,"t":{"136":{"position":[[130,12]]},"340":{"position":[[0,6]]}}}],["1×1×3×4=12(3)1",{"_index":1687,"t":{"340":{"position":[[65,14]]}}}],["1ηλ<1",{"_index":1912,"t":{"380":{"position":[[566,15]]}}}],["1−1。这样在所有输入结束后的计算前缀和阶段,在每一个值为[1,−1)[1",{"_index":636,"t":{"203":{"position":[[153,38]]}}}],["1−1加上之前元素所累积的1",{"_index":639,"t":{"203":{"position":[[261,42]]}}}],["1−1而抵消影响(自身值为−1",{"_index":638,"t":{"203":{"position":[[245,15]]}}}],["1−σ)(2",{"_index":1571,"t":{"320":{"position":[[160,8]]}}}],["1−σ)(2)\\frac{{\\rm",{"_index":1569,"t":{"320":{"position":[[77,18]]}}}],["1个filter,其中包含3个kernel。每个kernel分别对输入图像的3",{"_index":1681,"t":{"338":{"position":[[0,55]]}}}],["1,即n0",{"_index":593,"t":{"171":{"position":[[80,5]]}}}],["2",{"_index":131,"t":{"21":{"position":[[990,2],[993,2],[1078,2],[1247,2],[1258,2],[1358,2],[1397,2]]},"23":{"position":[[980,2],[983,2],[1068,2],[1237,2],[1248,2],[1348,2],[1387,2]]},"126":{"position":[[900,2],[945,3]]},"132":{"position":[[5,1]]},"141":{"position":[[783,2]]},"143":{"position":[[8,2]]},"145":{"position":[[194,2]]},"166":{"position":[[44,2]]},"168":{"position":[[44,2]]},"171":{"position":[[72,1]]},"221":{"position":[[99,2]]},"227":{"position":[[188,18]]},"273":{"position":[[207,4]]},"298":{"position":[[1808,3],[2199,3]]},"302":{"position":[[163,1]]},"314":{"position":[[474,20]]},"327":{"position":[[1657,3],[1840,3]]},"351":{"position":[[199,1]]},"357":{"position":[[365,1]]},"405":{"position":[[976,2],[979,2],[1060,2],[1229,2],[1240,2],[1340,2],[1379,2]]}}}],["2)中,当i,ji,ji,j",{"_index":1555,"t":{"314":{"position":[[426,47]]}}}],["2.95.2",{"_index":400,"t":{"101":{"position":[[658,6]]}}}],["2.list",{"_index":911,"t":{"273":{"position":[[51,6]]}}}],["20",{"_index":1338,"t":{"302":{"position":[[354,2]]}}}],["20,\"abc",{"_index":693,"t":{"215":{"position":[[21,11]]}}}],["200",{"_index":289,"t":{"29":{"position":[[743,3]]},"101":{"position":[[1799,3]]}}}],["2001,2003,2004",{"_index":187,"t":{"27":{"position":[[682,14]]}}}],["2003",{"_index":192,"t":{"27":{"position":[[752,5]]}}}],["2004",{"_index":197,"t":{"27":{"position":[[821,5]]}}}],["201",{"_index":643,"t":{"205":{"position":[[65,14],[141,8]]}}}],["201,210,012,021,102,120",{"_index":644,"t":{"205":{"position":[[80,26]]}}}],["201,让数字随意组合,是否能组合出30的倍数,如果能够组合成30",{"_index":642,"t":{"205":{"position":[[3,59]]}}}],["20px",{"_index":2161,"t":{"450":{"position":[[327,5]]}}}],["210",{"_index":646,"t":{"205":{"position":[[150,8]]}}}],["2147483647",{"_index":817,"t":{"255":{"position":[[529,11]]}}}],["235f2db4c261",{"_index":1727,"t":{"349":{"position":[[183,12]]
}}}],["256",{"_index":2034,"t":{"392":{"position":[[2652,3]]}}}],["27",{"_index":1683,"t":{"338":{"position":[[97,2]]}}}],["28",{"_index":1939,"t":{"392":{"position":[[410,3],[414,3]]}}}],["2])15cross_loss(predict",{"_index":1783,"t":{"355":{"position":[[1225,24]]}}}],["2])8nllloss(predict",{"_index":1777,"t":{"355":{"position":[[1035,20]]}}}],["2λw",{"_index":1837,"t":{"361":{"position":[[32,17]]}}}],["2型文法(上下文无关语法,cfg",{"_index":338,"t":{"54":{"position":[[0,32]]}}}],["2型文法,又称上下文无关文法(context",{"_index":319,"t":{"44":{"position":[[31,22]]}}}],["2,抹除所有置信度更小的其iou超过阈值的bbox",{"_index":553,"t":{"141":{"position":[[679,39]]}}}],["3",{"_index":83,"t":{"21":{"position":[[132,2],[171,2],[301,3],[519,2],[522,2],[525,4],[534,3],[567,2],[605,2],[610,3],[614,2],[617,2],[882,2],[1081,3],[1250,3],[1254,3],[1361,4]]},"23":{"position":[[122,2],[161,2],[291,3],[509,2],[512,2],[515,4],[524,3],[557,2],[595,2],[600,3],[604,2],[607,2],[872,2],[1071,3],[1240,3],[1244,3],[1351,4]]},"143":{"position":[[19,2]]},"145":{"position":[[231,2]]},"229":{"position":[[64,2]]},"267":{"position":[[26,2]]},"273":{"position":[[214,5]]},"298":{"position":[[1791,3],[2217,3],[2235,3]]},"327":{"position":[[1561,2],[1568,3],[1796,2],[1803,3]]},"336":{"position":[[142,1],[151,1]]},"338":{"position":[[77,1],[93,1]]},"340":{"position":[[96,1]]},"355":{"position":[[939,2],[946,3],[1180,2],[1187,3]]},"405":{"position":[[118,2],[157,2],[287,3],[505,2],[508,2],[511,4],[520,3],[553,2],[591,2],[596,3],[600,2],[603,2],[868,2],[1063,3],[1232,3],[1236,3],[1343,4]]},"409":{"position":[[629,1],[658,1],[744,1],[770,1]]},"450":{"position":[[88,3]]}}}],["3)中的a,ba,ba,b可缩小范围,并不用来实现全连接,此时a,ba,ba,b代表着卷积核的感受野,即kernel",{"_index":1558,"t":{"314":{"position":[[698,97]]}}}],["3.14159);//开辟一个存放单精度数的空间,并指定该实数的初值为//3.14159,将返回的该空间的地址赋给指针变量p",{"_index":895,"t":{"265":{"position":[[197,63]]}}}],["3.141590",{"_index":847,"t":{"260":{"position":[[139,9]]}}}],["3.1中的l1。如果w",{"_index":1843,"t":{"363":{"position":[[0,24]]}}}],["3.dequ",{"_index":912,"t":{"273":{"position":[[77,7]]}}}],["30",{"_index":650,"t":{"207":{"position":[[225,2]]}}}],["300000",{"_index":291,"t":{"29":{"position":[[770,6]]},"101":{"position":[[1826,6]]}}}],["30]的tensor",{"_index":547,"t":{"141":{"position":[[453,17]]}}}],["30]的tensor(包含所有预测框的坐标、置信度和类别结果),通过解析输出的tensor",{"_index":532,"t":{"141":{"position":[[99,51]]}}}],["32",{"_index":1182,"t":{"295":{"position":[[213,3]]}}}],["33×3卷积核,padding=1,stride=1padding=1",{"_index":1674,"t":{"334":{"position":[[100,36]]}}}],["33×3卷积的消融实验发现,7×77",{"_index":580,"t":{"155":{"position":[[96,18]]}}}],["35deg",{"_index":2155,"t":{"450":{"position":[[253,6]]}}}],["3][5,5,3",{"_index":1670,"t":{"334":{"position":[[26,12]]}}}],["3]图像,输出[7",{"_index":531,"t":{"141":{"position":[[85,10]]}}}],["3×33",{"_index":1673,"t":{"334":{"position":[[86,6]]}}}],["3×3×3×4=108(1)3",{"_index":1678,"t":{"336":{"position":[[119,15]]}}}],["3×3××3=27(2)3",{"_index":1682,"t":{"338":{"position":[[56,13]]}}}],["3个损失函数,使用梯度下降优化来求解线性回归模型。回想一下,更新梯度下降中的参数w",{"_index":1763,"t":{"355":{"position":[[678,57]]}}}],["3型文法,又称正规文法(regular",{"_index":326,"t":{"44":{"position":[[245,19]]}}}],["3科成绩(假设年级只有a班和b",{"_index":90,"t":{"21":{"position":[[225,40]]},"23":{"position":[[215,40]]},"405":{"position":[[211,40]]}}}],["4",{"_index":93,"t":{"21":{"position":[[298,2],[878,3],[1302,4],[1419,4]]},"23":{"position":[[288,2],[868,3],[1292,4],[1409,4]]},"76":{"position":[[0,11]]},"295":{"position":[[334,1],[404,1],[454,1]]},"298":{"position":[[1825,3],[2127,3]]},"336":{"position":[[160,1]]},"340":{"position
":[[105,1]]},"405":{"position":[[284,2],[864,3],[1284,4],[1401,4]]}}}],["4.stack",{"_index":916,"t":{"273":{"position":[[274,7]]}}}],["40px",{"_index":2164,"t":{"450":{"position":[[407,4]]}}}],["448",{"_index":530,"t":{"141":{"position":[[72,7],[80,4]]}}}],["4][5,5,4]的featur",{"_index":1672,"t":{"334":{"position":[[64,17]]}}}],["4个filter(输出通道为4),每个filter3个kernel(输入通道为3",{"_index":1676,"t":{"336":{"position":[[0,45]]}}}],["5",{"_index":136,"t":{"21":{"position":[[1106,2],[1288,2],[1299,2],[1377,2],[1416,2]]},"23":{"position":[[1096,2],[1278,2],[1289,2],[1367,2],[1406,2]]},"298":{"position":[[1842,3],[1859,3],[2145,3],[2163,3],[2181,3]]},"334":{"position":[[23,2],[61,2]]},"390":{"position":[[97,40]]},"392":{"position":[[773,1],[777,2],[2254,2]]},"405":{"position":[[1088,2],[1270,2],[1281,2],[1359,2],[1398,2]]}}}],["5.queue",{"_index":918,"t":{"273":{"position":[[334,7]]}}}],["6",{"_index":100,"t":{"21":{"position":[[419,3],[1109,3],[1291,3],[1295,3],[1380,4]]},"23":{"position":[[409,3],[1099,3],[1281,3],[1285,3],[1370,4]]},"29":{"position":[[1044,8]]},"298":{"position":[[1876,3],[1893,3]]},"302":{"position":[[2724,1]]},"392":{"position":[[571,2]]},"405":{"position":[[405,3],[1091,3],[1273,3],[1277,3],[1362,4]]},"409":{"position":[[366,1],[571,10],[595,1],[761,1]]}}}],["6.priority_queu",{"_index":920,"t":{"273":{"position":[[431,16]]}}}],["6层encod",{"_index":512,"t":{"134":{"position":[[16,11]]}}}],["7",{"_index":511,"t":{"134":{"position":[[0,15]]},"141":{"position":[[96,2],[450,2]]},"255":{"position":[[699,2]]},"298":{"position":[[1910,3],[1927,3]]},"327":{"position":[[1572,2],[1807,2]]},"355":{"position":[[950,2],[1191,2]]}}}],["7.14",{"_index":1354,"t":{"302":{"position":[[687,5]]}}}],["7.28",{"_index":1391,"t":{"302":{"position":[[1982,5],[2717,6]]}}}],["7.set",{"_index":922,"t":{"273":{"position":[[490,5]]}}}],["700",{"_index":2166,"t":{"450":{"position":[[428,4]]}}}],["77×7",{"_index":581,"t":{"155":{"position":[[122,11]]}}}],["77×7卷积与3×33",{"_index":579,"t":{"155":{"position":[[77,11]]}}}],["77×7卷积学习特征并降维,最后送入sigmoid",{"_index":577,"t":{"153":{"position":[[141,40]]}}}],["7×77",{"_index":578,"t":{"155":{"position":[[0,69]]}}}],["7个损失项是最终融合得到的featur",{"_index":517,"t":{"134":{"position":[[86,21]]}}}],["8",{"_index":280,"t":{"29":{"position":[[610,1]]},"101":{"position":[[1666,1]]},"231":{"position":[[0,10]]}}}],["8.multiset",{"_index":923,"t":{"273":{"position":[[514,10]]}}}],["84",{"_index":1957,"t":{"392":{"position":[[817,4]]}}}],["9",{"_index":108,"t":{"21":{"position":[[584,2],[587,2],[590,4]]},"23":{"position":[[574,2],[577,2],[580,4]]},"195":{"position":[[108,2]]},"327":{"position":[[1575,4],[1810,4]]},"405":{"position":[[570,2],[573,2],[576,4]]}}}],["9.map",{"_index":924,"t":{"273":{"position":[[543,5]]}}}],["9223372036854775807ll",{"_index":819,"t":{"255":{"position":[[559,22]]}}}],["92540646808111039ll",{"_index":822,"t":{"255":{"position":[[640,20]]}}}],["9999",{"_index":617,"t":{"195":{"position":[[83,5]]}}}],["9]])14label",{"_index":1782,"t":{"355":{"position":[[1194,11]]}}}],["9]])6predict",{"_index":1775,"t":{"355":{"position":[[953,12]]}}}],["__init__(self",{"_index":1934,"t":{"392":{"position":[[311,15],[447,15]]}}}],["__stack_chk_fail",{"_index":430,"t":{"101":{"position":[[1124,18]]}}}],["a(10,3",{"_index":679,"t":{"213":{"position":[[12,8]]}}}],["a(4,3),b(3,4",{"_index":689,"t":{"213":{"position":[[249,14]]}}}],["a)move(t,a",{"_index":335,"t":{"50":{"position":[[309,11]]}}}],["a,ba,ba,b",{"_index":1552,"t":{"314":{"position":[[200,40]]}}}],["a,const",{"_index":888,"t"
:{"262":{"position":[[64,7]]}}}],["a.argmax(dim=0",{"_index":128,"t":{"21":{"position":[[910,16]]},"23":{"position":[[900,16]]},"405":{"position":[[896,16]]}}}],["a.argmax(dim=1",{"_index":129,"t":{"21":{"position":[[927,16]]},"23":{"position":[[917,16]]},"405":{"position":[[913,16]]}}}],["a.assign(b.begin",{"_index":1078,"t":{"279":{"position":[[1009,19]]}}}],["a.assign(n",{"_index":1076,"t":{"279":{"position":[[970,11]]}}}],["a.back",{"_index":686,"t":{"213":{"position":[[150,9]]},"219":{"position":[[77,9]]},"225":{"position":[[57,9]]},"279":{"position":[[598,8]]}}}],["a.begin",{"_index":1061,"t":{"279":{"position":[[209,9]]}}}],["a.clear",{"_index":684,"t":{"213":{"position":[[114,10]]},"217":{"position":[[67,10]]},"225":{"position":[[35,10]]},"279":{"position":[[555,9]]}}}],["a.empti",{"_index":683,"t":{"213":{"position":[[80,10]]},"217":{"position":[[46,10]]},"219":{"position":[[24,10]]},"225":{"position":[[24,10]]},"279":{"position":[[390,9]]}}}],["a.end",{"_index":1062,"t":{"279":{"position":[[238,7]]}}}],["a.erase(first",{"_index":1073,"t":{"279":{"position":[[882,14]]}}}],["a.erase(it",{"_index":1072,"t":{"279":{"position":[[852,11]]}}}],["a.erase({\"1\",1",{"_index":731,"t":{"229":{"position":[[86,17]]}}}],["a.find({\"1\",1",{"_index":732,"t":{"229":{"position":[[104,16]]}}}],["a.first",{"_index":694,"t":{"215":{"position":[[33,10]]}}}],["a.front",{"_index":685,"t":{"213":{"position":[[131,10]]},"219":{"position":[[56,10]]},"225":{"position":[[46,10]]},"279":{"position":[[579,9]]}}}],["a.h)<(b.h",{"_index":890,"t":{"262":{"position":[[89,13]]}}}],["a.insert(it",{"_index":1071,"t":{"279":{"position":[[685,12],[727,12],[773,12]]}}}],["a.insert({\"1\",1",{"_index":730,"t":{"229":{"position":[[67,18]]}}}],["a.merge(b",{"_index":1069,"t":{"279":{"position":[[646,10]]}}}],["a.pop",{"_index":702,"t":{"219":{"position":[[97,8]]}}}],["a.pop_back",{"_index":688,"t":{"213":{"position":[[196,13]]},"225":{"position":[[82,13]]},"279":{"position":[[326,12]]}}}],["a.pop_front",{"_index":1065,"t":{"279":{"position":[[349,13]]}}}],["a.push(1",{"_index":701,"t":{"219":{"position":[[35,10]]}}}],["a.push_back",{"_index":687,"t":{"213":{"position":[[169,14]]},"225":{"position":[[67,14]]}}}],["a.push_back(x",{"_index":1064,"t":{"279":{"position":[[299,14]]}}}],["a.push_front(x",{"_index":1063,"t":{"279":{"position":[[271,15]]}}}],["a.remove(x",{"_index":1074,"t":{"279":{"position":[[941,11]]}}}],["a.resize(n",{"_index":1066,"t":{"279":{"position":[[418,11],[485,11]]}}}],["a.second",{"_index":695,"t":{"215":{"position":[[55,11]]}}}],["a.siz",{"_index":681,"t":{"213":{"position":[[48,9]]},"217":{"position":[[21,9]]},"219":{"position":[[14,9]]},"225":{"position":[[14,9]]},"279":{"position":[[373,8]]}}}],["a.swap(v",{"_index":1067,"t":{"279":{"position":[[616,9]]}}}],["a<0",{"_index":1383,"t":{"302":{"position":[[1630,4]]}}}],["a[\"2",{"_index":729,"t":{"229":{"position":[[55,6]]}}}],["abcdef",{"_index":1420,"t":{"302":{"position":[[2729,6]]}}}],["acc",{"_index":1869,"t":{"372":{"position":[[233,63]]},"392":{"position":[[1765,5],[1777,6],[2498,3],[2524,3]]}}}],["acc=∑i(predi==yi)len(y)(1)acc",{"_index":1866,"t":{"372":{"position":[[128,29]]}}}],["accur",{"_index":1715,"t":{"345":{"position":[[321,8]]}}}],["accuraci",{"_index":1870,"t":{"372":{"position":[[313,8]]},"392":{"position":[[1254,13]]}}}],["accuracy(y_hat",{"_index":1978,"t":{"392":{"position":[[1301,15],[2111,15]]}}}],["accuracy作为数学上的训练方法,即在训练过程中不使用与acc",{"_index":1865,"t":{"372":{"position":[[47,41]]}}}],["acc并无变化,出现梯度为0",{"_index":1873,"t":{"372
":{"position":[[353,63]]}}}],["acc,但并不会将maxim",{"_index":1864,"t":{"372":{"position":[[0,46]]}}}],["aco",{"_index":808,"t":{"255":{"position":[[448,5]]}}}],["action=report",{"_index":303,"t":{"29":{"position":[[1010,13]]},"101":{"position":[[2066,13]]}}}],["acw",{"_index":697,"t":{"217":{"position":[[11,9]]}}}],["addedge(int",{"_index":878,"t":{"260":{"position":[[787,11]]},"298":{"position":[[466,11]]},"300":{"position":[[662,11]]}}}],["adio",{"_index":174,"t":{"27":{"position":[[394,6],[1606,6]]}}}],["adjacent_find",{"_index":936,"t":{"275":{"position":[[62,13]]}}}],["adjacent)的等价(ident",{"_index":937,"t":{"275":{"position":[[76,32]]}}}],["adjlist",{"_index":1351,"t":{"302":{"position":[[636,7]]}}}],["adjlist[max_vertex_num",{"_index":1348,"t":{"302":{"position":[[555,24]]}}}],["adjv",{"_index":1343,"t":{"302":{"position":[[418,5],[2605,6]]}}}],["adjv=b;p",{"_index":1387,"t":{"302":{"position":[[1723,9]]}}}],["adjv]==0",{"_index":1414,"t":{"302":{"position":[[2577,10]]}}}],["ai",{"_index":2055,"t":{"402":{"position":[[0,21]]}}}],["ai论文】yolo",{"_index":527,"t":{"141":{"position":[[3,17]]}}}],["alexnet是指2012年由alex",{"_index":1913,"t":{"383":{"position":[[0,19]]}}}],["algorithm",{"_index":768,"t":{"251":{"position":[[42,9]]},"300":{"position":[[9,11]]}}}],["algraph",{"_index":1353,"t":{"302":{"position":[[650,9],[2816,7]]}}}],["all_ofc++11",{"_index":938,"t":{"275":{"position":[[109,11]]}}}],["all_proxi",{"_index":2100,"t":{"430":{"position":[[228,9]]}}}],["all_proxy=socks5://127.0.0.1:7890",{"_index":2095,"t":{"430":{"position":[[59,33]]}}}],["alpha",{"_index":304,"t":{"40":{"position":[[0,66],[116,6]]},"44":{"position":[[160,6]]}}}],["alpha,\\spac",{"_index":308,"t":{"40":{"position":[[102,13]]}}}],["alt",{"_index":747,"t":{"246":{"position":[[105,3],[125,3]]}}}],["anchor",{"_index":557,"t":{"143":{"position":[[22,7]]}}}],["anchor宽高比的聚类,聚类数越大,覆盖的i",{"_index":563,"t":{"143":{"position":[[150,43]]}}}],["anchor是通过k",{"_index":562,"t":{"143":{"position":[[115,10]]}}}],["anim",{"_index":1996,"t":{"392":{"position":[[1674,8]]}}}],["animator.add(epoch",{"_index":2022,"t":{"392":{"position":[[2291,18],[2416,18]]}}}],["announcementbar",{"_index":2149,"t":{"450":{"position":[[48,16],[71,16]]}}}],["any_ofc++11",{"_index":939,"t":{"275":{"position":[[144,11]]}}}],["append(),push_back",{"_index":1101,"t":{"281":{"position":[[728,20]]}}}],["applic",{"_index":2119,"t":{"439":{"position":[[41,11],[88,11]]}}}],["arch系用户通过以下命令即可完成bochs和nasm",{"_index":381,"t":{"99":{"position":[[55,31]]}}}],["arcnod",{"_index":1342,"t":{"302":{"position":[[405,8],[460,7],[475,9],[528,7],[1187,7],[1661,8],[2286,7]]}}}],["arcnum",{"_index":1357,"t":{"302":{"position":[[782,7],[1402,7],[1517,8]]}}}],["argmax",{"_index":110,"t":{"21":{"position":[[626,12],[946,6]]},"23":{"position":[[616,12],[936,6]]},"405":{"position":[[612,12],[932,6]]}}}],["argmin",{"_index":111,"t":{"21":{"position":[[641,6]]},"23":{"position":[[631,6]]},"405":{"position":[[627,6]]}}}],["arr_size(a",{"_index":797,"t":{"255":{"position":[[214,11]]}}}],["asciicod",{"_index":168,"t":{"27":{"position":[[272,9],[481,11],[1484,9],[1696,11]]}}}],["asciicode=='d')//ctrl+d",{"_index":172,"t":{"27":{"position":[[350,23],[1562,23]]}}}],["assert.h",{"_index":752,"t":{"249":{"position":[[21,8]]}}}],["assign",{"_index":1100,"t":{"281":{"position":[[690,10]]}}}],["attention应运而生,允许每个位置关注到序列中地所有其他位置。这种全局关联性质使得transform",{"_index":2069,"t":{"411":{"position":[[340,71]]}}}],["augment",{"_index":1920,"t":{"387":{"position":[[157,12]]}}}],["auto",{"_index":493,"t":{"126":{"posi
tion":[[1012,5]]}}}],["averag",{"_index":1713,"t":{"345":{"position":[[294,8]]}}}],["ax+b",{"_index":1832,"t":{"359":{"position":[[731,8]]}}}],["ax+bx",{"_index":1834,"t":{"359":{"position":[[792,5]]}}}],["a∈vn",{"_index":321,"t":{"44":{"position":[[109,5],[222,6]]}}}],["a与另一个list",{"_index":1068,"t":{"279":{"position":[[629,16]]}}}],["a中与范围b",{"_index":944,"t":{"275":{"position":[[271,23]]}}}],["a中所有值为x",{"_index":1075,"t":{"279":{"position":[[956,13]]}}}],["a中查找第一个与范围b",{"_index":953,"t":{"275":{"position":[[490,23]]}}}],["a中的所有元素替换成n个val",{"_index":1077,"t":{"279":{"position":[[990,18]]}}}],["a中第一个与范围b",{"_index":946,"t":{"275":{"position":[[309,26]]}}}],["a变成b",{"_index":1080,"t":{"279":{"position":[[1038,7]]}}}],["a是a班4位同学3科成绩,b是这4名同学其他3门课的成绩,拼接后代表这4名同学的6",{"_index":97,"t":{"21":{"position":[[339,47]]},"23":{"position":[[329,47]]},"405":{"position":[[325,47]]}}}],["a班4位同学,每位同学3",{"_index":84,"t":{"21":{"position":[[137,15]]},"23":{"position":[[127,15]]},"405":{"position":[[123,15]]}}}],["a而言,假设有一组互斥且穷尽的条件事件b,则事件a的概率等于事件a",{"_index":602,"t":{"182":{"position":[[194,65]]},"187":{"position":[[194,65]]}}}],["a,若存在方阵b使得ab=ba=单位方阵i,则方阵b为方阵a的逆矩阵,记为a−1a",{"_index":597,"t":{"177":{"position":[[69,52]]},"185":{"position":[[69,52]]}}}],["b",{"_index":85,"t":{"21":{"position":[[153,1],[212,3],[326,3],[530,1],[1085,1],[1124,2],[1266,1],[1318,2]]},"23":{"position":[[143,1],[202,3],[316,3],[520,1],[1075,1],[1114,2],[1256,1],[1308,2]]},"182":{"position":[[550,1],[613,1],[664,1]]},"187":{"position":[[550,1],[613,1],[664,1]]},"201":{"position":[[94,2],[122,3],[136,2]]},"213":{"position":[[271,2]]},"265":{"position":[[125,2]]},"298":{"position":[[89,2],[99,3],[111,3]]},"300":{"position":[[91,2],[101,3],[113,3]]},"302":{"position":[[1601,1],[2780,1],[2787,2],[2892,2]]},"353":{"position":[[113,2]]},"359":{"position":[[149,2]]},"378":{"position":[[48,2]]},"380":{"position":[[69,2],[250,2]]},"400":{"position":[[574,1],[628,1],[632,1]]},"405":{"position":[[139,1],[198,3],[312,3],[516,1],[1067,1],[1106,2],[1248,1],[1300,2]]}}}],["b){return",{"_index":889,"t":{"262":{"position":[[79,9]]}}}],["b)}{\\partial",{"_index":1903,"t":{"380":{"position":[[319,12]]}}}],["b,a",{"_index":1421,"t":{"302":{"position":[[2736,3]]}}}],["b,c,h,w][b",{"_index":568,"t":{"149":{"position":[[0,23],[117,29]]},"151":{"position":[[0,17]]},"153":{"position":[[0,17]]}}}],["b,d",{"_index":1422,"t":{"302":{"position":[[2740,3]]}}}],["b.end",{"_index":1079,"t":{"279":{"position":[[1029,8]]}}}],["b<0",{"_index":1384,"t":{"302":{"position":[[1638,4]]}}}],["b[i],e[i](0<=b[i]<=e[i]<=200000",{"_index":621,"t":{"199":{"position":[[103,56]]}}}],["b_t)}{\\partial",{"_index":1908,"t":{"380":{"position":[[481,14]]}}}],["background",{"_index":2154,"t":{"450":{"position":[[213,11]]}}}],["backward",{"_index":74,"t":{"9":{"position":[[278,13]]}}}],["base",{"_index":1326,"t":{"302":{"position":[[45,4],[58,4]]}}}],["bash的配置文件:~/.bashrc",{"_index":2105,"t":{"432":{"position":[[55,19]]}}}],["basic",{"_index":1697,"t":{"343":{"position":[[108,5]]}}}],["batch",{"_index":68,"t":{"9":{"position":[[140,5]]},"392":{"position":[[1910,6],[2224,6],[2265,5],[2312,6]]}}}],["batch_siz",{"_index":2033,"t":{"392":{"position":[[2639,10]]}}}],["batchsize越小,收敛效果越好。随机梯度下降理论上带来了噪音,batchs",{"_index":71,"t":{"9":{"position":[[164,68]]}}}],["batch上计算损失函数以及梯度,近似损失。此时,batchs",{"_index":65,"t":{"9":{"position":[[0,96]]}}}],["batch中有大量样本均存在这种情况,此时acc有显著提升而网络的权重的更新极小,此时,与acc有关的loss",{"_index":1878,"t":{"372":{"position":[[489,129]]}}}],["batch数据,即mini",{"_index":67,"t":{"9":{"position
":[[126,13]]}}}],["batteri",{"_index":2132,"t":{"439":{"position":[[201,7]]}}}],["bbb",{"_index":1895,"t":{"378":{"position":[[181,10]]},"400":{"position":[[178,38]]}}}],["bbox与其他所有置信度更小的bbox做iou判断,若iou大于设置的阈值,则抹除置信度小的bbox",{"_index":552,"t":{"141":{"position":[[619,59]]}}}],["bbox包含(x",{"_index":540,"t":{"141":{"position":[[289,11]]}}}],["bbox的置信度与其父grid",{"_index":550,"t":{"141":{"position":[[547,17]]}}}],["bbox都会在loss",{"_index":554,"t":{"141":{"position":[[737,19]]}}}],["bc",{"_index":2169,"t":{"456":{"position":[[0,14]]}}}],["be",{"_index":388,"t":{"101":{"position":[[330,5]]},"345":{"position":[[209,5]]}}}],["beg,end]内所有字符作为字符串",{"_index":1099,"t":{"281":{"position":[[649,26]]}}}],["begin",{"_index":632,"t":{"201":{"position":[[303,6],[337,7],[357,6],[406,6]]}}}],["begin(),end",{"_index":1116,"t":{"281":{"position":[[1135,13]]}}}],["begin[i]+1",{"_index":624,"t":{"199":{"position":[[288,37]]}}}],["begin[i],end[i](0<=begin[i]<=end[i]<=200000",{"_index":622,"t":{"199":{"position":[[189,66]]}}}],["begin{cas",{"_index":31,"t":{"2":{"position":[[462,13],[598,13]]},"4":{"position":[[454,13],[590,13]]},"15":{"position":[[462,13],[598,13]]},"17":{"position":[[454,13],[590,13]]},"322":{"position":[[24,13],[160,13]]}}}],["behind",{"_index":1699,"t":{"343":{"position":[[119,6]]}}}],["beta",{"_index":316,"t":{"42":{"position":[[43,5],[82,16]]},"44":{"position":[[183,5]]}}}],["beta,\\spac",{"_index":315,"t":{"42":{"position":[[30,12]]},"44":{"position":[[147,12]]}}}],["better",{"_index":2137,"t":{"439":{"position":[[310,6]]}}}],["bfs(int",{"_index":1221,"t":{"298":{"position":[[727,7]]},"305":{"position":[[1078,7]]},"307":{"position":[[780,7]]}}}],["bfs(rt",{"_index":1512,"t":{"305":{"position":[[2002,8]]},"307":{"position":[[1617,8]]}}}],["bia",{"_index":1743,"t":{"353":{"position":[[116,9]]}}}],["big",{"_index":1902,"t":{"380":{"position":[[291,5]]},"437":{"position":[[101,3]]}}}],["big(l(w",{"_index":1901,"t":{"380":{"position":[[240,9]]}}}],["binari",{"_index":62,"t":{"6":{"position":[[303,6]]},"19":{"position":[[303,6]]}}}],["binary_search",{"_index":996,"t":{"275":{"position":[[1908,13]]}}}],["bit",{"_index":771,"t":{"251":{"position":[[152,4]]}}}],["bitbit",{"_index":1181,"t":{"295":{"position":[[65,6],[101,6]]}}}],["bits/stdc++.h",{"_index":481,"t":{"126":{"position":[[49,15]]},"195":{"position":[[9,15]]},"201":{"position":[[9,15]]},"207":{"position":[[9,15]]}}}],["bitset",{"_index":678,"t":{"211":{"position":[[294,9]]},"251":{"position":[[145,6]]},"295":{"position":[[0,10],[26,13]]}}}],["bitset<10000",{"_index":734,"t":{"231":{"position":[[11,13]]}}}],["bitset>n>>m",{"_index":1272,"t":{"300":{"position":[[1086,10]]}}}],["cin>>row",{"_index":897,"t":{"267":{"position":[[29,9]]}}}],["cin>>row>>col",{"_index":903,"t":{"269":{"position":[[0,14]]}}}],["cin>>src>>dst",{"_index":1276,"t":{"300":{"position":[[1140,14]]}}}],["ci×h×wc_i",{"_index":1655,"t":{"332":{"position":[[0,12]]}}}],["class",{"_index":64,"t":{"6":{"position":[[322,5]]},"19":{"position":[[322,5]]},"392":{"position":[[276,5],[418,5]]}}}],["classifi",{"_index":2053,"t":{"400":{"position":[[656,10]]}}}],["clear",{"_index":1104,"t":{"281":{"position":[[781,7]]}}}],["clock",{"_index":2138,"t":{"439":{"position":[[324,5]]}}}],["closure(t)\\epsilon",{"_index":336,"t":{"50":{"position":[[321,22]]}}}],["closure(t)ϵ−closure(t",{"_index":337,"t":{"50":{"position":[[345,22]]}}}],["cloud",{"_index":465,"t":{"118":{"position":[[157,7]]}}}],["cnn",{"_index":2064,"t":{"411":{"position":[[0,23]]}}}],["cnn中没有全连接层时,本质上可以接受任意尺寸的输入,但这是狭隘的。若考虑其下游任务以
及输出,如fcn(fulli",{"_index":2058,"t":{"409":{"position":[[124,68]]}}}],["cnn使用卷积层通过滑动卷积核在输入上进行局部感受野的操作。每个神经元只与输入的一小部分区域相连,这意味着每个神经元只能接触到局部的上下文信息。这样的设计使得cnn",{"_index":2065,"t":{"411":{"position":[[24,111]]}}}],["cnn在面临长输入序列时不能很好地综合上下文信息、提取位置信息,因此self",{"_index":2068,"t":{"411":{"position":[[288,51]]}}}],["cnn本质上可以接受任意通道数的图像输入,但是其模型效果将会受到极大的影响。以一个使用通道数为3的数据集进行训练的cnn",{"_index":2061,"t":{"409":{"position":[[278,83]]}}}],["cnn模型的输入向量的形状是固定的,其输出向量的形状也是固定的或可以根据不同的下游任务而唯一确定,即输入形状与下游任务共同确定了一个cnn",{"_index":2057,"t":{"409":{"position":[[0,84]]}}}],["cnn的参数共享使得模型能够学习到图像中的局部特征,这也是一种对于上下文的假设。相邻位置上的权重共享使得模型能够对局部结构进行建模,并且这种权重共享使得cnn",{"_index":2066,"t":{"411":{"position":[[142,89]]}}}],["cnn的设计理念认为:在图像任务中,局部结构通常更为重要,局部连接和权值共享使得cnn",{"_index":2067,"t":{"411":{"position":[[232,55]]}}}],["cnt",{"_index":1433,"t":{"302":{"position":[[2811,4],[2985,6]]}}}],["cnt0−1w<0\\frac{d|w|}{d",{"_index":1754,"t":{"355":{"position":[[441,30]]}}}],["e",{"_index":5,"t":{"2":{"position":[[70,3]]},"4":{"position":[[62,3]]},"15":{"position":[[70,3]]},"17":{"position":[[62,3]]},"201":{"position":[[97,2],[126,4],[147,1]]},"246":{"position":[[64,1]]},"302":{"position":[[307,2],[325,3],[952,2],[957,12],[1028,2]]},"320":{"position":[[39,3]]},"359":{"position":[[808,37]]},"430":{"position":[[181,1],[279,1]]}}}],["e,a",{"_index":1428,"t":{"302":{"position":[[2764,3]]}}}],["each",{"_index":1705,"t":{"343":{"position":[[285,4]]},"345":{"position":[[3,4],[199,4],[267,4]]}}}],["echo",{"_index":252,"t":{"27":{"position":[[2179,4]]},"430":{"position":[[175,4],[273,4]]}}}],["edit",{"_index":268,"t":{"29":{"position":[[412,4]]},"101":{"position":[[1468,4]]}}}],["elf_i386",{"_index":427,"t":{"101":{"position":[[1088,8]]}}}],["empti",{"_index":1109,"t":{"281":{"position":[[920,7]]}}}],["enabled=0",{"_index":293,"t":{"29":{"position":[[784,9],[812,9],[839,9]]},"101":{"position":[[1840,9],[1868,9],[1895,9]]}}}],["encod",{"_index":506,"t":{"132":{"position":[[192,19]]}}}],["encoder阶段,每个block之后使用maxpool",{"_index":503,"t":{"132":{"position":[[41,37]]}}}],["encrypt",{"_index":592,"t":{"164":{"position":[[22,10]]},"166":{"position":[[33,10],[77,10]]},"168":{"position":[[33,10],[77,10]]}}}],["end",{"_index":633,"t":{"201":{"position":[[310,4],[345,6],[372,3],[418,4]]},"227":{"position":[[136,22]]},"273":{"position":[[164,5]]},"400":{"position":[[639,3]]}}}],["end[i",{"_index":623,"t":{"199":{"position":[[269,18]]}}}],["endif",{"_index":1331,"t":{"302":{"position":[[203,6]]}}}],["endl",{"_index":496,"t":{"126":{"position":[[1088,5]]},"195":{"position":[[193,5]]},"207":{"position":[[352,5],[391,5]]},"217":{"position":[[122,5]]},"277":{"position":[[1646,5]]},"298":{"position":[[1700,5],[2046,5],[2341,5]]}}}],["end{aligned}\\right",{"_index":1802,"t":{"355":{"position":[[1828,20]]}}}],["end{aligned}l​=(y^​−y)2=(wx+b−y)2",{"_index":1750,"t":{"355":{"position":[[193,35]]}}}],["end{aligned}wnew",{"_index":1791,"t":{"355":{"position":[[1430,17],[1849,17],[2119,17]]}}}],["end{align}h(p",{"_index":1630,"t":{"327":{"position":[[1313,15]]}}}],["end{align}h(p)​=−i∑n​pi",{"_index":1597,"t":{"327":{"position":[[411,25]]}}}],["end{array}\\right.dwd∣w∣​={1−1​w>0w<0",{"_index":1758,"t":{"355":{"position":[[520,38]]}}}],["end{array}\\right.wnew",{"_index":1818,"t":{"357":{"position":[[266,22]]}}}],["end{cas",{"_index":33,"t":{"2":{"position":[[503,11],[639,11]]},"4":{"position":[[495,11],[631,11]]},"15":{"position":[[503,11],[639,11]]},"17":{"position":[[495,11],[631,11]]},"322":{"position":[[65,11],[201,11]]}}}],["entri",{"_index":184,"t":{"27":{"position":[[654,5],[1
737,5]]}}}],["entropi",{"_index":60,"t":{"6":{"position":[[284,7]]},"19":{"position":[[284,7]]}}}],["entropy),是描述两个概率分布p和q",{"_index":1603,"t":{"327":{"position":[[539,29]]}}}],["entropy中的entropi",{"_index":1580,"t":{"327":{"position":[[11,50]]}}}],["entrpoy",{"_index":1585,"t":{"327":{"position":[[97,7]]}}}],["enumerate(train_it",{"_index":2008,"t":{"392":{"position":[[1927,22]]}}}],["enum{dg",{"_index":1339,"t":{"302":{"position":[[365,8]]}}}],["ep",{"_index":810,"t":{"255":{"position":[[467,3]]}}}],["epoch",{"_index":2006,"t":{"392":{"position":[[1878,5]]},"424":{"position":[[313,8]]}}}],["equal",{"_index":941,"t":{"275":{"position":[[224,5]]}}}],["equal_rang",{"_index":997,"t":{"275":{"position":[[1942,11]]}}}],["eras",{"_index":1103,"t":{"281":{"position":[[768,7]]}}}],["errno.h",{"_index":754,"t":{"249":{"position":[[50,7]]}}}],["error",{"_index":47,"t":{"6":{"position":[[13,5]]},"19":{"position":[[13,5]]},"101":{"position":[[347,6]]},"302":{"position":[[115,5],[1329,6],[1392,6],[1650,6]]}}}],["estim",{"_index":1716,"t":{"345":{"position":[[330,8]]}}}],["eta",{"_index":1766,"t":{"355":{"position":[[783,4],[1363,4],[1405,4],[1608,4],[1654,4],[1745,4],[1789,4],[2036,4],[2082,4]]},"380":{"position":[[435,4],[454,4]]}}}],["euclidean",{"_index":1732,"t":{"351":{"position":[[232,9]]}}}],["evalu",{"_index":1694,"t":{"343":{"position":[[67,8]]}}}],["evaluate_accuracy_gpu(net",{"_index":1961,"t":{"392":{"position":[[900,26],[2378,26]]}}}],["exactli",{"_index":1710,"t":{"345":{"position":[[220,7]]}}}],["exampl",{"_index":265,"t":{"29":{"position":[[370,7]]},"101":{"position":[[1426,7]]},"283":{"position":[[90,30]]}}}],["examples/sec",{"_index":2030,"t":{"392":{"position":[[2596,12]]}}}],["excit",{"_index":585,"t":{"159":{"position":[[12,10]]}}}],["excitation激励操作就是通过sigmoid",{"_index":589,"t":{"159":{"position":[[153,42]]}}}],["execut",{"_index":408,"t":{"101":{"position":[[788,7]]}}}],["exit",{"_index":157,"t":{"27":{"position":[[109,4],[1321,4]]}}}],["exit(0",{"_index":259,"t":{"27":{"position":[[2343,8]]}}}],["exit(1",{"_index":176,"t":{"27":{"position":[[426,8],[1641,8]]}}}],["exit(overflow",{"_index":1386,"t":{"302":{"position":[[1705,15]]}}}],["export",{"_index":2094,"t":{"430":{"position":[[52,6],[93,6],[133,6]]}}}],["extra_c_opt",{"_index":436,"t":{"101":{"position":[[1271,15],[1341,15]]}}}],["f",{"_index":19,"t":{"2":{"position":[[313,1],[717,1]]},"4":{"position":[[305,1],[709,1]]},"15":{"position":[[313,1],[717,1]]},"17":{"position":[[305,1],[709,1]]},"295":{"position":[[313,7]]},"309":{"position":[[237,2],[573,3]]},"320":{"position":[[290,1]]},"322":{"position":[[279,1]]},"392":{"position":[[94,1]]}}}],["f(x",{"_index":55,"t":{"6":{"position":[[200,4]]},"19":{"position":[[200,4]]},"325":{"position":[[176,4]]}}}],["f(x)={0x<0xx≥0(1)f(x",{"_index":30,"t":{"2":{"position":[[438,21]]},"4":{"position":[[430,21]]},"15":{"position":[[438,21]]},"17":{"position":[[430,21]]}}}],["f(x)={0x<0xx≥0(3)f(x",{"_index":1572,"t":{"322":{"position":[[0,21]]}}}],["f(x)]^2",{"_index":51,"t":{"6":{"position":[[126,9],[234,9]]},"19":{"position":[[126,9],[234,9]]},"325":{"position":[[102,9],[210,9]]}}}],["f,a",{"_index":1429,"t":{"302":{"position":[[2768,3]]}}}],["f,b",{"_index":1430,"t":{"302":{"position":[[2772,3]]}}}],["f,e",{"_index":1431,"t":{"302":{"position":[[2776,3]]}}}],["f.relu(x",{"_index":39,"t":{"2":{"position":[[766,9]]},"4":{"position":[[758,9]]},"15":{"position":[[766,9]]},"17":{"position":[[758,9]]},"322":{"position":[[328,9]]}}}],["f.sigmoid(x",{"_index":24,"t":{"2":{"position":[[362,12]]},
"4":{"position":[[354,12]]},"15":{"position":[[362,12]]},"17":{"position":[[354,12]]},"320":{"position":[[339,12]]}}}],["f1",{"_index":748,"t":{"246":{"position":[[131,2],[146,2]]}}}],["f10",{"_index":750,"t":{"246":{"position":[[269,3]]}}}],["f11",{"_index":751,"t":{"246":{"position":[[280,3]]}}}],["f12",{"_index":746,"t":{"246":{"position":[[73,3],[89,3]]}}}],["f5",{"_index":749,"t":{"246":{"position":[[245,2],[260,2]]}}}],["f=0",{"_index":1542,"t":{"309":{"position":[[623,4]]}}}],["f=1",{"_index":1548,"t":{"309":{"position":[[801,4]]}}}],["fals",{"_index":883,"t":{"260":{"position":[[862,7]]},"298":{"position":[[540,6],[1327,6],[1753,7],[2087,7]]},"300":{"position":[[736,6],[2257,20]]},"302":{"position":[[86,5]]},"450":{"position":[[141,6]]}}}],["fa根据当前的状态及扫描的输入字符,便能唯一地知道fa",{"_index":329,"t":{"50":{"position":[[24,49]]}}}],["fcn",{"_index":565,"t":{"145":{"position":[[234,9]]}}}],["file",{"_index":210,"t":{"27":{"position":[[986,4]]},"29":{"position":[[387,5]]},"99":{"position":[[11,6]]},"101":{"position":[[1228,5],[1443,5]]}}}],["file=/usr/local/share/bochs/bio",{"_index":277,"t":{"29":{"position":[[541,32]]},"101":{"position":[[1597,32]]}}}],["file=/usr/local/share/bochs/vgabio",{"_index":273,"t":{"29":{"position":[[466,35]]},"101":{"position":[[1522,35]]}}}],["filesystem",{"_index":229,"t":{"27":{"position":[[1790,12]]}}}],["fill",{"_index":958,"t":{"275":{"position":[[694,4]]}}}],["fill_n",{"_index":959,"t":{"275":{"position":[[714,6]]}}}],["filter都对输入图像的所有通道完成一次卷积,filter中的kernel",{"_index":1677,"t":{"336":{"position":[[46,59]]}}}],["final",{"_index":1722,"t":{"347":{"position":[[109,5]]}}}],["find",{"_index":942,"t":{"275":{"position":[[241,4]]}}}],["find_end",{"_index":943,"t":{"275":{"position":[[262,8]]}}}],["find_first_of",{"_index":945,"t":{"275":{"position":[[295,13]]}}}],["find_if",{"_index":947,"t":{"275":{"position":[[336,7]]}}}],["find_if_notc++11",{"_index":948,"t":{"275":{"position":[[360,16]]}}}],["findallpath(algraph",{"_index":1397,"t":{"302":{"position":[[2220,19]]}}}],["findallpath(g",{"_index":1415,"t":{"302":{"position":[[2588,14],[3068,14]]}}}],["finder小组件中appl",{"_index":2113,"t":{"437":{"position":[[273,21]]}}}],["finder栏中plasmoid",{"_index":2116,"t":{"439":{"position":[[0,29]]}}}],["find,拷贝copy,删除erase,替换replace,插入insert",{"_index":1085,"t":{"281":{"position":[[287,41]]}}}],["first",{"_index":835,"t":{"258":{"position":[[24,6]]},"260":{"position":[[280,6]]},"277":{"position":[[765,6]]},"279":{"position":[[786,6]]},"305":{"position":[[178,6]]},"307":{"position":[[178,6]]}}}],["first(该非终结符)减去ϵ\\epsilonϵ的所有终结符元素都加入至follow",{"_index":341,"t":{"57":{"position":[[154,52]]}}}],["first=1",{"_index":1505,"t":{"305":{"position":[[1745,8]]},"307":{"position":[[1410,8]]}}}],["firstarc",{"_index":1347,"t":{"302":{"position":[[536,10]]}}}],["first和last",{"_index":1060,"t":{"279":{"position":[[150,47]]}}}],["first和last所指定的序列[first",{"_index":1038,"t":{"277":{"position":[[781,28],[884,29],[1177,33]]},"279":{"position":[[802,28],[905,29]]}}}],["first集、follow集是针对于符号串而言的,而select",{"_index":342,"t":{"59":{"position":[[3,49]]}}}],["fish的配置文件:~/.config/fish/config.fish",{"_index":2103,"t":{"432":{"position":[[0,36]]}}}],["flag",{"_index":431,"t":{"101":{"position":[[1200,5]]},"207":{"position":[[93,4],[236,4],[322,5]]}}}],["flip",{"_index":740,"t":{"231":{"position":[[183,7]]}}}],["float",{"_index":840,"t":{"258":{"position":[[59,5]]},"265":{"position":[[178,5],[191,5]]}}}],["float.h",{"_index":755,"t":{"249":{"position":[[65,7]]}}}],["floppya",{"_index":282,"t":{"29":{"position":
[[620,8],[658,9]]},"101":{"position":[[1676,8],[1714,9]]}}}],["fno",{"_index":437,"t":{"101":{"position":[[1321,3]]}}}],["focal",{"_index":582,"t":{"157":{"position":[[0,5]]}}}],["fold",{"_index":1689,"t":{"343":{"position":[[2,4],[128,4],[198,6],[268,4]]},"345":{"position":[[23,4],[61,5],[122,5],[204,4]]}}}],["follow",{"_index":339,"t":{"57":{"position":[[3,20],[24,29],[129,24]]}}}],["follow集加入到该非终结符的follow",{"_index":340,"t":{"57":{"position":[[88,40]]}}}],["follow集解决的话则是slr(1",{"_index":360,"t":{"81":{"position":[[221,29]]}}}],["font",{"_index":2165,"t":{"450":{"position":[[415,4]]}}}],["for(i,f_start,f_end",{"_index":794,"t":{"255":{"position":[[141,20]]}}}],["for(int",{"_index":795,"t":{"255":{"position":[[162,7]]},"298":{"position":[[901,7],[1334,7],[1981,7],[2270,7]]},"300":{"position":[[1828,7],[1956,7]]},"305":{"position":[[1275,7],[1754,7],[1806,7],[1855,7]]},"307":{"position":[[977,7],[1419,7],[1463,7],[1504,7]]},"309":{"position":[[690,7],[732,7]]}}}],["for_each",{"_index":949,"t":{"275":{"position":[[394,8]]}}}],["fork",{"_index":2128,"t":{"439":{"position":[[179,6]]}}}],["forward",{"_index":72,"t":{"9":{"position":[[233,12]]}}}],["forward(self",{"_index":1937,"t":{"392":{"position":[[368,13],[859,13]]}}}],["found",{"_index":1240,"t":{"298":{"position":[[1266,6],[1319,5],[1551,5],[1579,8]]}}}],["four",{"_index":841,"t":{"258":{"position":[[65,5]]}}}],["four:0.25",{"_index":856,"t":{"260":{"position":[[291,9]]}}}],["frac{1}{1",{"_index":4,"t":{"2":{"position":[[57,10]]},"4":{"position":[[49,10]]},"15":{"position":[[57,10]]},"17":{"position":[[49,10]]},"320":{"position":[[26,10]]}}}],["frac{\\lambda}{2",{"_index":1898,"t":{"380":{"position":[[74,17],[255,17]]}}}],["frac{\\parti",{"_index":1767,"t":{"355":{"position":[[788,14],[1368,14],[1613,14],[2041,14]]},"380":{"position":[[299,14],[459,14]]}}}],["frac{\\sum{i(pred_i==y_i)}}{len(i",{"_index":1867,"t":{"372":{"position":[[160,35]]}}}],["frac{d|w|}{d",{"_index":1798,"t":{"355":{"position":[[1691,13]]}}}],["frac{e^{y_i}}{\\sum_{j}^{n}{e^{y^j",{"_index":43,"t":{"2":{"position":[[849,37]]},"4":{"position":[[841,37]]},"15":{"position":[[849,37]]},"17":{"position":[[841,37]]}}}],["frac{p(b|a",{"_index":604,"t":{"182":{"position":[[469,12]]},"187":{"position":[[469,12]]}}}],["frac{shape_{input",{"_index":1565,"t":{"316":{"position":[[343,19]]},"332":{"position":[[273,19]]}}}],["free",{"_index":203,"t":{"27":{"position":[[895,4]]},"44":{"position":[[54,4]]}}}],["freebsd",{"_index":413,"t":{"101":{"position":[[891,8]]}}}],["front",{"_index":664,"t":{"211":{"position":[[72,8]]}}}],["function",{"_index":18,"t":{"2":{"position":[[299,10],[703,10]]},"4":{"position":[[291,10],[695,10]]},"15":{"position":[[299,10],[703,10]]},"17":{"position":[[291,10],[695,10]]},"320":{"position":[[276,10]]},"322":{"position":[[265,10]]},"392":{"position":[[80,10]]},"430":{"position":[[31,8],[200,8]]}}}],["function中起到更新的作用,因此不进行nm",{"_index":555,"t":{"141":{"position":[[757,25]]}}}],["g",{"_index":1224,"t":{"298":{"position":[[774,2],[1153,2]]},"300":{"position":[[1070,2]]},"302":{"position":[[938,2],[2240,2],[2824,2]]}}}],["g(8",{"_index":882,"t":{"260":{"position":[[857,4]]},"298":{"position":[[1748,4]]}}}],["g.addedge(0",{"_index":1245,"t":{"298":{"position":[[1761,12],[1778,12]]}}}],["g.addedge(1",{"_index":1246,"t":{"298":{"position":[[1795,12]]}}}],["g.addedge(3",{"_index":1247,"t":{"298":{"position":[[1812,12],[1829,12]]}}}],["g.addedge(4",{"_index":1248,"t":{"298":{"position":[[1846,12],[1863,12]]}}}],["g.addedge(5",{"_index":1249,"t":{"298":{"positio
n":[[1880,12],[1897,12]]}}}],["g.addedge(6",{"_index":1250,"t":{"298":{"position":[[1914,12]]}}}],["g.addedge(src,dst",{"_index":1277,"t":{"300":{"position":[[1155,19]]}}}],["g.bfs(0",{"_index":1252,"t":{"298":{"position":[[1948,9]]}}}],["g.erase(g.begin",{"_index":1230,"t":{"298":{"position":[[862,19]]}}}],["g.pop_back",{"_index":1244,"t":{"298":{"position":[[1672,13]]}}}],["g.push_back(id1",{"_index":1235,"t":{"298":{"position":[[1026,17],[1459,17]]}}}],["g.push_back(start",{"_index":1226,"t":{"298":{"position":[[782,19],[1161,19]]}}}],["g.vers[i].data",{"_index":1366,"t":{"302":{"position":[[1009,15]]}}}],["g.vers[path[i]].data",{"_index":1408,"t":{"302":{"position":[[2429,21]]}}}],["g.vertexs.begin",{"_index":1279,"t":{"300":{"position":[[1207,18]]}}}],["g.vertexs[u].connectors.clear();//清空u",{"_index":1309,"t":{"300":{"position":[[2130,42]]}}}],["g1(6",{"_index":1255,"t":{"298":{"position":[[2081,5]]}}}],["g1.addedge(0",{"_index":1256,"t":{"298":{"position":[[2095,13],[2113,13],[2131,13]]}}}],["g1.addedge(1",{"_index":1257,"t":{"298":{"position":[[2149,13]]}}}],["g1.addedge(2",{"_index":1260,"t":{"298":{"position":[[2221,13]]}}}],["g1.addedge(4",{"_index":1258,"t":{"298":{"position":[[2167,13]]}}}],["g1.addedge(5",{"_index":1259,"t":{"298":{"position":[[2185,13],[2203,13]]}}}],["g1.dfs(0",{"_index":1262,"t":{"298":{"position":[[2259,10]]}}}],["g=graph(n",{"_index":1273,"t":{"300":{"position":[[1097,10]]}}}],["g[0",{"_index":1229,"t":{"298":{"position":[[856,5]]}}}],["g[g.size",{"_index":1241,"t":{"298":{"position":[[1304,10],[1600,10]]}}}],["gcc",{"_index":399,"t":{"101":{"position":[[654,3],[973,3]]}}}],["geeko",{"_index":151,"t":{"27":{"position":[[0,8],[640,6]]},"29":{"position":[[0,16]]},"91":{"position":[[110,12],[123,7],[206,25],[270,16]]},"93":{"position":[[70,15]]},"95":{"position":[[0,18]]},"99":{"position":[[0,10],[34,14]]},"101":{"position":[[34,8],[360,8],[584,8],[1149,8],[1371,7],[2132,15]]}}}],["geekos!\\n",{"_index":249,"t":{"27":{"position":[[2093,12]]}}}],["geekos/bootinfo.h",{"_index":213,"t":{"27":{"position":[[1014,19]]}}}],["geekos/crc32.h",{"_index":217,"t":{"27":{"position":[[1121,16]]}}}],["geekos/int.h",{"_index":219,"t":{"27":{"position":[[1171,14]]}}}],["geekos/keyboard.h",{"_index":223,"t":{"27":{"position":[[1274,19]]}}}],["geekos/kthread.h",{"_index":220,"t":{"27":{"position":[[1195,18]]}}}],["geekos/mem.h",{"_index":216,"t":{"27":{"position":[[1097,14]]}}}],["geekos/screen.h",{"_index":215,"t":{"27":{"position":[[1070,17]]}}}],["geekos/string.h",{"_index":214,"t":{"27":{"position":[[1043,17]]}}}],["geekos/timer.h",{"_index":222,"t":{"27":{"position":[[1248,16]]}}}],["geekos/trap.h",{"_index":221,"t":{"27":{"position":[[1223,15]]}}}],["geekos/tss.h",{"_index":218,"t":{"27":{"position":[[1147,14]]}}}],["geekos中makefil",{"_index":386,"t":{"101":{"position":[[193,23],[238,19]]}}}],["geekos是一个基于x86",{"_index":362,"t":{"91":{"position":[[0,27]]}}}],["geekos环境的配置,下面我们来验证环境配置的成功与否以及project",{"_index":150,"t":{"25":{"position":[[0,49]]}}}],["geekos设计的7",{"_index":367,"t":{"91":{"position":[[174,20]]}}}],["geekos运行依托于boch",{"_index":373,"t":{"93":{"position":[[21,17]]}}}],["gener",{"_index":414,"t":{"101":{"position":[[907,9]]},"275":{"position":[[742,8]]}}}],["general_opt",{"_index":393,"t":{"101":{"position":[[490,15],[541,15],[1246,12],[1295,12]]}}}],["generate_n",{"_index":960,"t":{"275":{"position":[[786,10]]}}}],["geq",{"_index":32,"t":{"2":{"position":[[494,4],[630,4]]},"4":{"position":[[486,4],[622,4]]},"15":{"position":[[494,4],[630,4]]},"17":{"position":[
[486,4],[622,4]]},"322":{"position":[[56,4],[192,4]]}}}],["getheight(int",{"_index":1479,"t":{"305":{"position":[[970,13]]},"307":{"position":[[680,13]]}}}],["getlin",{"_index":1112,"t":{"281":{"position":[[1007,12]]}}}],["global",{"_index":2125,"t":{"439":{"position":[[136,6]]}}}],["global_step",{"_index":2082,"t":{"424":{"position":[[232,14]]},"426":{"position":[[198,14]]}}}],["gnu",{"_index":419,"t":{"101":{"position":[[999,3]]}}}],["gradient",{"_index":69,"t":{"9":{"position":[[146,8]]},"372":{"position":[[297,8],[417,8]]},"450":{"position":[[242,9]]}}}],["grammar,cfg",{"_index":320,"t":{"44":{"position":[[59,12]]}}}],["grammar,rg",{"_index":327,"t":{"44":{"position":[[265,11]]}}}],["graph",{"_index":866,"t":{"260":{"position":[[524,15],[555,5],[718,7],[844,6],[851,5]]},"298":{"position":[[258,15],[289,5],[1722,6],[1742,5],[2075,5]]},"300":{"position":[[399,15],[430,5],[593,7],[1057,6],[1064,5]]}}}],["graph(int",{"_index":871,"t":{"260":{"position":[[642,9]]},"298":{"position":[[376,9]]},"300":{"position":[[517,9]]}}}],["graphkind",{"_index":1341,"t":{"302":{"position":[[379,10],[601,9]]}}}],["grate",{"_index":454,"t":{"118":{"position":[[3,8],[61,8],[112,8]]}}}],["gray",{"_index":250,"t":{"27":{"position":[[2137,7]]}}}],["green|bright",{"_index":247,"t":{"27":{"position":[[2059,15]]}}}],["grid",{"_index":536,"t":{"141":{"position":[[206,6],[368,8],[471,6]]}}}],["h",{"_index":189,"t":{"27":{"position":[[703,2]]},"141":{"position":[[304,2]]},"149":{"position":[[27,2],[150,2],[193,2]]},"151":{"position":[[21,2]]},"153":{"position":[[21,2],[94,2]]},"159":{"position":[[53,2]]},"262":{"position":[[26,3]]},"302":{"position":[[1551,1],[1597,3]]},"332":{"position":[[20,1],[155,2],[442,1],[460,2]]},"357":{"position":[[224,2],[362,2]]},"359":{"position":[[396,5]]},"444":{"position":[[54,1]]}}}],["h(p",{"_index":1593,"t":{"327":{"position":[[312,4],[1157,4],[1213,4],[1231,4]]}}}],["h(p)=−∑inpi",{"_index":1590,"t":{"327":{"position":[[259,11]]}}}],["h)+\\lambda",{"_index":1817,"t":{"357":{"position":[[248,11]]}}}],["h,t",{"_index":1370,"t":{"302":{"position":[[1182,4]]}}}],["h=2x(wx+b−y)h=2",{"_index":1810,"t":{"357":{"position":[[49,15]]}}}],["hash",{"_index":927,"t":{"273":{"position":[[609,19],[646,19],[678,19],[715,19]]}}}],["hat{i",{"_index":1747,"t":{"355":{"position":[[156,10]]}}}],["heap",{"_index":704,"t":{"221":{"position":[[30,5]]}}}],["heap.clear",{"_index":705,"t":{"221":{"position":[[36,13]]}}}],["heap.empti",{"_index":707,"t":{"221":{"position":[[63,13]]}}}],["heap.push",{"_index":708,"t":{"221":{"position":[[107,10]]}}}],["heap.siz",{"_index":706,"t":{"221":{"position":[[50,12]]}}}],["higher",{"_index":1584,"t":{"327":{"position":[[90,6],[132,6]]}}}],["hinton提出的一种卷积神经网络模型,它主要应用于图像分类任务。在当时,alexnet的表现远远超过了其他参赛的网络模型,并且在imagenet",{"_index":1916,"t":{"383":{"position":[[55,83]]}}}],["hit",{"_index":158,"t":{"27":{"position":[[114,3],[1326,3]]}}}],["hollings@cs.umd.edu",{"_index":196,"t":{"27":{"position":[[783,21]]}}}],["hollingsworth",{"_index":195,"t":{"27":{"position":[[769,13]]}}}],["host",{"_index":406,"t":{"101":{"position":[[735,4],[805,4]]}}}],["host_cc",{"_index":417,"t":{"101":{"position":[[962,7]]}}}],["hovemey",{"_index":190,"t":{"27":{"position":[[706,9]]}}}],["http://127.0.0.1:7890",{"_index":1930,"t":{"392":{"position":[[199,23]]}}}],["http_proxi",{"_index":2101,"t":{"430":{"position":[[244,10]]}}}],["http_proxy=http://127.0.0.1:7890",{"_index":2096,"t":{"430":{"position":[[100,32]]}}}],["https://127.0.0.1:7890",{"_index":1932,"t":{"392":{"position":[[251,24]]}}}],["https:
//blog.csdn.net/f_zyj/article/details/51594851",{"_index":908,"t":{"271":{"position":[[4,52]]}}}],["https://download.csdn.net/download/f_zyj/9988653",{"_index":909,"t":{"271":{"position":[[57,48]]}}}],["https://towardsdatascience.com/intuit",{"_index":1724,"t":{"349":{"position":[[108,46]]}}}],["https://www.bilibili.com/video/bv12u411s7us/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":351,"t":{"76":{"position":[[284,107]]}}}],["https://www.bilibili.com/video/bv13r4y1m7sq/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":358,"t":{"76":{"position":[[732,107]]}}}],["https://www.bilibili.com/video/bv1564y1e7b9/?spm_id_from=333.999.0.0&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":2167,"t":{"452":{"position":[[28,116]]}}}],["https://www.bilibili.com/video/bv1pl4y1e7re/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":348,"t":{"76":{"position":[[121,107]]}}}],["https://www.bilibili.com/video/bv1vm4y1q7xb/?spm_id_from=333.788&vd_source=24d8fcf68bc0e2b0003defe0995cf533",{"_index":354,"t":{"76":{"position":[[458,107]]}}}],["https_proxi",{"_index":2102,"t":{"430":{"position":[[261,11]]}}}],["https_proxy=https://127.0.0.1:7890",{"_index":2097,"t":{"430":{"position":[[140,34]]}}}],["hwnew",{"_index":1813,"t":{"357":{"position":[[134,5]]}}}],["hw×cihw",{"_index":1652,"t":{"330":{"position":[[51,15]]}}}],["hyperparamet",{"_index":1721,"t":{"347":{"position":[[80,14]]}}}],["h′=h+n−1n(1)h'=\\frac{h+n",{"_index":448,"t":{"110":{"position":[[275,24]]}}}],["h′以及w′h'以及w'h′以及w",{"_index":1664,"t":{"332":{"position":[[177,24]]}}}],["h外,w",{"_index":1836,"t":{"361":{"position":[[0,24]]}}}],["h给我们一个w",{"_index":1825,"t":{"359":{"position":[[74,38]]}}}],["i+1",{"_index":1447,"t":{"302":{"position":[[3182,4]]}}}],["i,a,b",{"_index":1368,"t":{"302":{"position":[[1144,6]]}}}],["i,j",{"_index":1432,"t":{"302":{"position":[[2802,4]]}}}],["i,ji,ji,j代表输出神经元的二维索引坐标,h,wh,wh,w",{"_index":1549,"t":{"314":{"position":[[27,49]]}}}],["i/o",{"_index":764,"t":{"249":{"position":[[188,5]]}}}],["i440fxsupport",{"_index":295,"t":{"29":{"position":[[824,14]]},"101":{"position":[[1880,14]]}}}],["i<0",{"_index":1374,"t":{"302":{"position":[[1316,5],[1379,5]]}}}],["i<=k",{"_index":1405,"t":{"302":{"position":[[2386,5]]}}}],["ir2",{"_index":1531,"t":{"309":{"position":[[352,9]]}}}],["if(la>ra",{"_index":1464,"t":{"305":{"position":[[365,9],[707,9]]},"307":{"position":[[266,9],[495,9]]}}}],["if(n==0||m==0",{"_index":1313,"t":{"300":{"position":[[2300,15]]}}}],["if(num==n",{"_index":1311,"t":{"300":{"position":[[2196,10]]}}}],["if(read_key(&keycod",{"_index":163,"t":{"27":{"position":[[161,22],[1373,22]]}}}],["if(rt",{"_index":1540,"t":{"309":{"position":[[560,7]]}}}],["if(rt==0",{"_index":1480,"t":{"305":{"position":[[990,9],[1380,9],[1495,9],[1611,9]]},"307":{"position":[[700,9],[1061,9],[1168,9],[1276,9]]}}}],["if(t[w].l!=0",{"_index":1485,"t":{"305":{"position":[[1195,13]]},"307":{"position":[[897,13]]}}}],["if(t[w].r!=0",{"_index":1487,"t":{"305":{"position":[[1225,13]]},"307":{"position":[[927,13]]}}}],["if(topologicalsort",{"_index":1319,"t":{"300":{"position":[[2423,22]]}}}],["ifm",{"_index":2157,"t":{"450":{"position":[[266,3],[300,3],[339,3],[379,3]]}}}],["ifndef",{"_index":1325,"t":{"302":{"position":[[37,7]]}}}],["image数据,在传入visdom时仍需要先转化为numpi",{"_index":2085,"t":{"424":{"position":[[355,35]]}}}],["import",{"_index":17,"t":{"2":{"position":[[292,6],[315,6],[696,6],[719,6]]},"4":{"position":[[284,6],[307,6],[688,6],[711,6]]},"15":{"position":[[
292,6],[315,6],[696,6],[719,6]]},"17":{"position":[[284,6],[307,6],[688,6],[711,6]]},"320":{"position":[[269,6],[292,6]]},"322":{"position":[[258,6],[281,6]]},"327":{"position":[[1474,6]]},"392":{"position":[[0,6],[13,6],[43,6],[73,6],[105,6],[141,6],[162,6]]},"424":{"position":[[12,6]]},"426":{"position":[[12,6]]}}}],["in[maxn",{"_index":1523,"t":{"309":{"position":[[255,9]]}}}],["includ",{"_index":212,"t":{"27":{"position":[[1005,8],[1034,8],[1061,8],[1088,8],[1112,8],[1138,8],[1162,8],[1186,8],[1214,8],[1239,8],[1265,8]]},"126":{"position":[[40,8]]},"195":{"position":[[0,8]]},"201":{"position":[[0,8]]},"207":{"position":[[0,8]]},"229":{"position":[[0,8]]},"275":{"position":[[2062,8]]},"277":{"position":[[1443,8],[1463,8]]},"298":{"position":[[0,8],[20,8],[38,8]]},"300":{"position":[[0,8],[21,8],[41,8],[59,8]]}}}],["include::iter",{"_index":1138,"t":{"285":{"position":[[765,14]]}}}],["int[col",{"_index":907,"t":{"269":{"position":[[84,9]]}}}],["int[nrow",{"_index":899,"t":{"267":{"position":[[77,8]]}}}],["intuit",{"_index":1823,"t":{"359":{"position":[[50,9],[139,9]]}}}],["intut",{"_index":1828,"t":{"359":{"position":[[384,8],[558,8]]}}}],["ios::sync_with_stdio(fals",{"_index":641,"t":{"203":{"position":[[346,28]]}}}],["iostream",{"_index":1050,"t":{"277":{"position":[[1452,10]]},"298":{"position":[[9,10]]},"300":{"position":[[30,10]]}}}],["is_heap",{"_index":1006,"t":{"275":{"position":[[2236,7]]}}}],["is_heap_untilc++11",{"_index":1007,"t":{"275":{"position":[[2258,18]]}}}],["is_list",{"_index":1289,"t":{"300":{"position":[[1508,10]]}}}],["is_partitionedc++11",{"_index":983,"t":{"275":{"position":[[1542,19]]}}}],["is_permutationc++11",{"_index":1012,"t":{"275":{"position":[[2397,19]]}}}],["is_sorted_untilc++11",{"_index":990,"t":{"275":{"position":[[1749,20]]}}}],["is_sortedc++11",{"_index":989,"t":{"275":{"position":[[1722,14]]}}}],["is_tre",{"_index":1293,"t":{"300":{"position":[[1656,10]]}}}],["isclos",{"_index":2151,"t":{"450":{"position":[[128,12]]}}}],["isdag",{"_index":870,"t":{"260":{"position":[[624,6],[660,6]]},"298":{"position":[[358,6],[394,6],[550,7]]},"300":{"position":[[499,6],[535,6],[746,7]]}}}],["isdag(1",{"_index":876,"t":{"260":{"position":[[741,8]]},"300":{"position":[[616,8]]}}}],["isdag(isdag",{"_index":873,"t":{"260":{"position":[[682,12]]},"298":{"position":[[416,12]]},"300":{"position":[[557,12]]}}}],["isinstance(net",{"_index":1964,"t":{"392":{"position":[[955,15]]}}}],["isinstance(x",{"_index":1971,"t":{"392":{"position":[[1114,13]]}}}],["it'",{"_index":1581,"t":{"327":{"position":[[62,4]]}}}],["iter",{"_index":769,"t":{"251":{"position":[[78,8]]},"275":{"position":[[835,22]]},"345":{"position":[[8,9],[272,9]]}}}],["iter_swap",{"_index":961,"t":{"275":{"position":[[825,9]]}}}],["it指向的元素前插入n个新元素val",{"_index":1037,"t":{"277":{"position":[[729,22]]},"279":{"position":[[750,22]]}}}],["it指向的元素前插入新元素val",{"_index":1036,"t":{"277":{"position":[[685,20]]},"279":{"position":[[706,20]]}}}],["iulian",{"_index":198,"t":{"27":{"position":[[827,6]]}}}],["j",{"_index":742,"t":{"246":{"position":[[7,1],[111,1]]},"298":{"position":[[1989,1],[1996,1],[2011,4]]},"302":{"position":[[2924,1],[3086,2]]}}}],["jeffrey",{"_index":193,"t":{"27":{"position":[[758,7]]}}}],["k",{"_index":194,"t":{"27":{"position":[[766,2]]},"246":{"position":[[161,2],[188,2]]},"302":{"position":[[2259,2]]},"343":{"position":[[0,1],[126,1]]},"345":{"position":[[21,1]]},"456":{"position":[[173,15]]}}}],["k+1",{"_index":1416,"t":{"302":{"position":[[2615,5]]}}}],["k_h",{"_index":1659,"t":{"332":{"position"
:[[73,3]]}}}],["k_wco​×ci​×kh​×kw",{"_index":1660,"t":{"332":{"position":[[84,18]]}}}],["kernel",{"_index":225,"t":{"27":{"position":[[1723,6],[1764,6],[2162,6]]},"29":{"position":[[994,7]]},"101":{"position":[[2050,7]]}}}],["kernel_size=5",{"_index":1946,"t":{"392":{"position":[[574,14],[673,15]]}}}],["kernel_thread",{"_index":180,"t":{"27":{"position":[[542,13],[2224,13]]}}}],["key",{"_index":254,"t":{"27":{"position":[[2192,4]]},"285":{"position":[[63,4],[920,31]]}}}],["key]操作是map很有特色的操作,如果在map中存在键值为key",{"_index":1133,"t":{"285":{"position":[[371,39]]}}}],["key_ctrl_flag)==key_ctrl_flag",{"_index":171,"t":{"27":{"position":[[317,29],[1529,29]]}}}],["key_release_flag",{"_index":166,"t":{"27":{"position":[[234,21],[1446,21]]}}}],["key_special_flag",{"_index":165,"t":{"27":{"position":[[202,17],[1414,17]]}}}],["keyboard_serial_delay",{"_index":288,"t":{"29":{"position":[[720,22]]},"101":{"position":[[1776,22]]}}}],["keycod",{"_index":161,"t":{"27":{"position":[[133,7],[141,8],[223,8],[284,7],[1345,7],[1353,8],[1435,8],[1496,7]]}}}],["key的元素对,值域为默认值。所以可以用该操作向map",{"_index":1134,"t":{"285":{"position":[[411,74]]}}}],["key部分作为标识,map中所有元素的key值必须是唯一的,multimap则允许有重复的key",{"_index":1128,"t":{"285":{"position":[[90,50]]}}}],["key(当另一个元素是整形时,m[key]=0",{"_index":1137,"t":{"285":{"position":[[694,58]]}}}],["kh=kw=1k_h=k_w=1kh​=kw​=1",{"_index":1651,"t":{"330":{"position":[[0,50]]}}}],["kind",{"_index":1349,"t":{"302":{"position":[[611,5],[1277,5]]}}}],["kkk",{"_index":1702,"t":{"343":{"position":[[179,3],[239,3]]},"345":{"position":[[57,3],[183,3]]}}}],["kl",{"_index":1600,"t":{"327":{"position":[[473,5],[479,15],[689,11]]}}}],["known",{"_index":1730,"t":{"351":{"position":[[51,5],[212,5]]}}}],["kpple",{"_index":2117,"t":{"439":{"position":[[30,5]]}}}],["krizhevsky、ilya",{"_index":1914,"t":{"383":{"position":[[20,15]]}}}],["kruskal",{"_index":595,"t":{"173":{"position":[[90,9]]}}}],["kullback–leibl",{"_index":1601,"t":{"327":{"position":[[495,16]]}}}],["k−1k",{"_index":1707,"t":{"345":{"position":[[112,4]]}}}],["k个变为v",{"_index":738,"t":{"231":{"position":[[152,7]]}}}],["k的结点是u(第k+1个是u",{"_index":1403,"t":{"302":{"position":[[2336,21]]}}}],["l",{"_index":361,"t":{"83":{"position":[[84,1]]},"85":{"position":[[76,1]]},"277":{"position":[[1527,2]]},"355":{"position":[[154,1]]},"392":{"position":[[2035,1]]}}}],["l(w",{"_index":1886,"t":{"378":{"position":[[43,4]]},"380":{"position":[[314,4]]}}}],["l(w,b",{"_index":1883,"t":{"378":{"position":[[4,6],[124,6]]}}}],["l(w,b)+λ2∥w∥12(2)l(w",{"_index":1897,"t":{"380":{"position":[[47,21]]}}}],["l(w_t",{"_index":1907,"t":{"380":{"position":[[474,6]]}}}],["l(y,z)=max(0,−y∗z)(1)l(y,z)=max(0",{"_index":2044,"t":{"400":{"position":[[366,34]]}}}],["l,r",{"_index":1453,"t":{"305":{"position":[[203,4]]},"307":{"position":[[203,4]]}}}],["l,r,d",{"_index":1524,"t":{"309":{"position":[[283,6]]}}}],["l.backward",{"_index":2014,"t":{"392":{"position":[[2054,12]]}}}],["l.push_back(x",{"_index":1052,"t":{"277":{"position":[[1555,15]]}}}],["l.size",{"_index":1053,"t":{"277":{"position":[[1586,8]]}}}],["l1",{"_index":1535,"t":{"309":{"position":[[433,3]]},"349":{"position":[[158,2]]},"351":{"position":[[60,2]]},"355":{"position":[[254,13],[268,27]]},"357":{"position":[[146,3]]},"361":{"position":[[68,4]]}}}],["l1,int",{"_index":1526,"t":{"309":{"position":[[315,6]]}}}],["l1:l1:l1",{"_index":1793,"t":{"355":{"position":[[1478,9]]}}}],["l1=(wx+b−y)2+λ∣w∣l_{1}=(w",{"_index":1751,"t":{"355":{"position":[[296,25]]}}}],["l1具有将权重推向0的影响,而l2没有,但这并不意味着由于l2的权重不能达到或者接近0",{"_index":1860,"t"
:{"363":{"position":[[580,46]]}}}],["l1和l2",{"_index":1723,"t":{"349":{"position":[[3,29],[259,36],[296,96]]},"353":{"position":[[0,53]]}}}],["l1和l2正则化分别归因于向量w的l1和l2",{"_index":1728,"t":{"351":{"position":[[0,37]]}}}],["l1和l2正则化的效果,让我们使用3",{"_index":1744,"t":{"355":{"position":[[0,45]]}}}],["l1完全减少了模型中的特征数量。以下是l1",{"_index":1847,"t":{"363":{"position":[[134,47]]}}}],["l1正则化(硬性限制)、l2",{"_index":1881,"t":{"376":{"position":[[200,35]]}}}],["l1正则化会使得一部分参数变为0,从而实现特征选择的效果;l2正则化则会使得模型参数尽量接近0",{"_index":1882,"t":{"376":{"position":[[236,93]]}}}],["l1正则化限制权重参数的l1",{"_index":1894,"t":{"378":{"position":[[154,26]]}}}],["l1的权重更新会受到第一点的影响,但来自l2",{"_index":1841,"t":{"361":{"position":[[151,78]]}}}],["l1范数进行正则化的线性回归模型称为lasso",{"_index":1735,"t":{"351":{"position":[[622,25]]}}}],["l2",{"_index":1725,"t":{"349":{"position":[[165,2]]},"351":{"position":[[221,2],[667,44],[854,15]]},"355":{"position":[[559,13]]},"361":{"position":[[73,3],[100,4],[146,4]]}}}],["l2,int",{"_index":1528,"t":{"309":{"position":[[329,6]]}}}],["l2:l2:l2",{"_index":1804,"t":{"355":{"position":[[1954,9]]},"357":{"position":[[317,9]]}}}],["l2=(wx+b−y)2+λw2l_{2}=(w",{"_index":1760,"t":{"355":{"position":[[596,24]]}}}],["l2正则化是指在模型的损失函数中,加入对模型参数的l2",{"_index":1896,"t":{"380":{"position":[[0,46]]}}}],["l2正则化项添加到l",{"_index":1759,"t":{"355":{"position":[[573,22]]}}}],["l2范数是对元素求平方和后再开根号,需要.pow(2",{"_index":48,"t":{"6":{"position":[[24,36]]},"19":{"position":[[24,36]]},"325":{"position":[[0,36]]}}}],["l:l:l",{"_index":1786,"t":{"355":{"position":[[1289,6]]},"357":{"position":[[99,6]]}}}],["l=(y^−y)2=(wx+b−y)2\\begin{align",{"_index":1746,"t":{"355":{"position":[[119,34]]}}}],["l[i",{"_index":1054,"t":{"277":{"position":[[1623,4]]}}}],["l_{1}}{\\partial",{"_index":1795,"t":{"355":{"position":[[1628,15]]}}}],["l_{2}}{\\partial",{"_index":1806,"t":{"355":{"position":[[2056,15]]}}}],["la",{"_index":1470,"t":{"305":{"position":[[477,3],[820,3]]},"307":{"position":[[346,3],[575,3]]}}}],["la,int",{"_index":1456,"t":{"305":{"position":[[253,6],[651,6]]},"307":{"position":[[239,6],[468,6]]}}}],["la,ra",{"_index":1460,"t":{"305":{"position":[[281,12],[679,12]]}}}],["label",{"_index":1644,"t":{"327":{"position":[[1632,5],[1678,6],[1815,5],[1864,6]]}}}],["label)16",{"_index":1784,"t":{"355":{"position":[[1250,9]]}}}],["label)9",{"_index":1778,"t":{"355":{"position":[[1056,8]]}}}],["lalr(1",{"_index":356,"t":{"76":{"position":[[620,10]]}}}],["lambda",{"_index":1801,"t":{"355":{"position":[[1813,8],[2108,7]]},"357":{"position":[[227,8],[367,7]]},"380":{"position":[[137,23],[337,7],[556,7]]}}}],["lambda)w_t",{"_index":1906,"t":{"380":{"position":[[440,11]]}}}],["last",{"_index":1027,"t":{"277":{"position":[[370,6],[422,10],[772,5],[876,7],[914,5],[1168,5],[1225,16]]},"279":{"position":[[144,5],[793,5],[897,7],[935,5]]}}}],["last)插入到迭代器it",{"_index":1039,"t":{"277":{"position":[[810,20]]},"279":{"position":[[831,20]]}}}],["last),[first",{"_index":1048,"t":{"277":{"position":[[1211,13]]}}}],["later",{"_index":401,"t":{"101":{"position":[[668,5]]}}}],["latest",{"_index":275,"t":{"29":{"position":[[507,6],[580,6]]},"101":{"position":[[1563,6],[1636,6]]}}}],["lb,int",{"_index":1458,"t":{"305":{"position":[[267,6],[665,6]]},"307":{"position":[[253,6],[482,6]]}}}],["lb,rb",{"_index":1461,"t":{"305":{"position":[[294,12],[692,12]]}}}],["ld",{"_index":420,"t":{"101":{"position":[[1003,2]]}}}],["learn",{"_index":1693,"t":{"343":{"position":[[55,8]]}}}],["lecun等人于1998年提出的卷积神经网络结构,该结构由卷积层、池化层和全连接层组成,可以高效地处理手写数字图像,并在mnist",{"_index":1922,"t":{"390":{"position":[[12,78]
]}}}],["left\\{\\begin{align",{"_index":1800,"t":{"355":{"position":[[1718,24]]}}}],["left\\{\\begin{array}{l",{"_index":1816,"t":{"357":{"position":[[193,27]]}}}],["legend=[\"curve_name_1",{"_index":2088,"t":{"426":{"position":[[110,23]]}}}],["legend=['train",{"_index":1999,"t":{"392":{"position":[[1736,14]]}}}],["len",{"_index":487,"t":{"126":{"position":[[828,3],[949,3]]}}}],["len(train_it",{"_index":2003,"t":{"392":{"position":[[1818,15]]}}}],["len==1,则la==ra",{"_index":1462,"t":{"305":{"position":[[312,41]]}}}],["len=v.siz",{"_index":1489,"t":{"305":{"position":[[1261,13]]},"307":{"position":[[963,13]]}}}],["lenet",{"_index":1923,"t":{"390":{"position":[[91,5]]},"392":{"position":[[2745,5]]}}}],["lenet5",{"_index":2036,"t":{"392":{"position":[[2753,8]]}}}],["lenet5(nn.modul",{"_index":1940,"t":{"392":{"position":[[424,18]]}}}],["lenetreshap",{"_index":1944,"t":{"392":{"position":[[526,15]]}}}],["lenetreshape(nn.modul",{"_index":1933,"t":{"392":{"position":[[282,24]]}}}],["lenet是由yann",{"_index":1921,"t":{"390":{"position":[[0,11]]}}}],["leq",{"_index":1890,"t":{"378":{"position":[[101,4]]}}}],["less",{"_index":1586,"t":{"327":{"position":[[111,4]]}}}],["lexicographical_compar",{"_index":1013,"t":{"275":{"position":[[2437,23]]}}}],["lgorithm",{"_index":935,"t":{"275":{"position":[[0,12]]}}}],["lgpl",{"_index":274,"t":{"29":{"position":[[502,4]]},"101":{"position":[[1558,4]]}}}],["lighter",{"_index":2160,"t":{"450":{"position":[[284,9],[318,8]]}}}],["lightest",{"_index":2162,"t":{"450":{"position":[[357,9],[397,9]]}}}],["limits.h",{"_index":756,"t":{"249":{"position":[[78,8]]}}}],["line",{"_index":269,"t":{"29":{"position":[[423,5]]},"101":{"position":[[1479,5]]}}}],["linear",{"_index":27,"t":{"2":{"position":[[419,6]]},"4":{"position":[[411,6]]},"15":{"position":[[419,6]]},"17":{"position":[[411,6]]},"450":{"position":[[235,6]]}}}],["linker",{"_index":418,"t":{"101":{"position":[[991,7]]}}}],["linux",{"_index":412,"t":{"101":{"position":[[881,5]]},"446":{"position":[[3,15]]}}}],["linux操作系统后需要安装bochs以及nasm",{"_index":374,"t":{"93":{"position":[[39,30]]}}}],["linux自带的编译环境以及编译命令对特定的geeko",{"_index":375,"t":{"95":{"position":[[47,30]]}}}],["list",{"_index":105,"t":{"21":{"position":[[540,16]]},"23":{"position":[[530,16]]},"251":{"position":[[110,4]]},"279":{"position":[[0,8],[9,23],[198,10]]},"392":{"position":[[1128,6]]},"405":{"position":[[526,16]]}}}],["list.end",{"_index":655,"t":{"209":{"position":[[58,12],[138,13]]}}}],["lista(first",{"_index":1059,"t":{"279":{"position":[[126,17]]}}}],["lista(n",{"_index":1056,"t":{"279":{"position":[[51,13],[87,13]]}}}],["lista{1,2,3",{"_index":1055,"t":{"279":{"position":[[33,17]]}}}],["list和tensor",{"_index":101,"t":{"21":{"position":[[423,17]]},"23":{"position":[[413,17]]},"405":{"position":[[409,17]]}}}],["list和vector",{"_index":915,"t":{"273":{"position":[[228,45]]}}}],["list或deque实现,封闭头部即可,不用vector",{"_index":917,"t":{"273":{"position":[[282,51],[342,51]]}}}],["list的*乘法是复制元素,改变list的shap",{"_index":102,"t":{"21":{"position":[[441,26]]},"23":{"position":[[431,26]]},"405":{"position":[[427,26]]}}}],["ll",{"_index":803,"t":{"255":{"position":[[360,2],[547,2],[633,2],[667,2]]},"305":{"position":[[118,3]]},"307":{"position":[[118,3]]},"309":{"position":[[204,3]]}}}],["locale.h",{"_index":757,"t":{"249":{"position":[[92,8]]}}}],["locatevex_al(*pg",{"_index":1382,"t":{"302":{"position":[[1579,17],[1605,17]]}}}],["locatevex_al(algraph",{"_index":1364,"t":{"302":{"position":[[917,20]]}}}],["locatevex_al(g",{"_index":1436,"t":{"302":{"position":[[2899,15
],[2928,15]]}}}],["log",{"_index":286,"t":{"29":{"position":[[701,4]]},"101":{"position":[[1757,4]]}}}],["log2(1pi)(7)\\begin{align",{"_index":1592,"t":{"327":{"position":[[286,25]]}}}],["log2(pi)=∑inpi",{"_index":1591,"t":{"327":{"position":[[271,14]]}}}],["log2(pi)−log2(qi)](9)d_{kl}(p",{"_index":1614,"t":{"327":{"position":[[842,30]]}}}],["log2(qi)(10)\\begin{align",{"_index":1626,"t":{"327":{"position":[[1187,25]]}}}],["log2​(pi​)=i∑n​pi",{"_index":1598,"t":{"327":{"position":[[437,18]]}}}],["log2​(pi​)−log2​(qi​)](9",{"_index":1618,"t":{"327":{"position":[[959,26]]}}}],["log2​(pi​1​)​(7",{"_index":1599,"t":{"327":{"position":[[456,16]]}}}],["log2​(qi​)​(10",{"_index":1632,"t":{"327":{"position":[[1357,15]]}}}],["log_2(p_i",{"_index":1616,"t":{"327":{"position":[[910,11]]}}}],["log_2(q_i)]}\\tag{9}dkl​(p",{"_index":1617,"t":{"327":{"position":[[922,25]]}}}],["logist",{"_index":2,"t":{"2":{"position":[[20,10]]},"4":{"position":[[12,10]]},"15":{"position":[[20,10]]},"17":{"position":[[12,10]]},"366":{"position":[[0,8]]}}}],["long",{"_index":466,"t":{"120":{"position":[[3,17],[21,32]]},"255":{"position":[[363,4],[368,4],[394,4],[399,4]]},"295":{"position":[[1010,4]]},"305":{"position":[[108,4],[113,4]]},"307":{"position":[[108,4],[113,4]]},"309":{"position":[[194,4],[199,4]]}}}],["loss",{"_index":61,"t":{"6":{"position":[[292,4]]},"19":{"position":[[292,4]]},"134":{"position":[[129,4]]},"157":{"position":[[6,43],[87,14]]},"392":{"position":[[1623,4],[1751,6]]},"424":{"position":[[280,8]]}}}],["loss(y_hat",{"_index":2013,"t":{"392":{"position":[[2039,11]]}}}],["loss.to(devic",{"_index":1995,"t":{"392":{"position":[[1658,15]]}}}],["lossmse=∑[y−f(x)]2(1)loss_{ms",{"_index":49,"t":{"6":{"position":[[81,31]]},"19":{"position":[[81,31]]}}}],["lossmse=∑[y−f(x)]2(5)loss_{ms",{"_index":1576,"t":{"325":{"position":[[57,31]]}}}],["loss得到6",{"_index":516,"t":{"134":{"position":[[74,11]]}}}],["lot",{"_index":301,"t":{"29":{"position":[[936,3]]},"101":{"position":[[1992,3]]}}}],["lower_bound",{"_index":998,"t":{"275":{"position":[[1975,11]]}}}],["lr",{"_index":344,"t":{"71":{"position":[[29,9]]},"392":{"position":[[1414,3],[2720,3],[2810,3]]}}}],["lr(0",{"_index":347,"t":{"76":{"position":[[98,8]]},"81":{"position":[[138,6],[159,13]]}}}],["lr(1",{"_index":353,"t":{"76":{"position":[[435,8]]},"81":{"position":[[153,5]]}}}],["lr=lr",{"_index":1993,"t":{"392":{"position":[[1616,6]]}}}],["lru",{"_index":447,"t":{"110":{"position":[[233,25]]}}}],["lu",{"_index":599,"t":{"179":{"position":[[676,57]]},"185":{"position":[[804,57]]}}}],["l}{\\partial",{"_index":1768,"t":{"355":{"position":[[803,11],[1383,11]]}}}],["l定义为平方误差,其中误差是i",{"_index":1745,"t":{"355":{"position":[[76,22]]}}}],["m",{"_index":426,"t":{"101":{"position":[[1086,1]]},"279":{"position":[[101,2]]},"285":{"position":[[323,2]]},"422":{"position":[[52,1]]}}}],["m.clear",{"_index":1145,"t":{"285":{"position":[[1053,10]]}}}],["m.empti",{"_index":1144,"t":{"285":{"position":[[1032,10]]}}}],["m.erase(it",{"_index":1142,"t":{"285":{"position":[[952,12]]}}}],["m.erase(key",{"_index":1141,"t":{"285":{"position":[[903,13]]}}}],["m.find(key",{"_index":1139,"t":{"285":{"position":[[785,12]]}}}],["m.insert(make_pair(key",{"_index":1135,"t":{"285":{"position":[[486,23]]}}}],["m.size",{"_index":1143,"t":{"285":{"position":[[1012,9]]}}}],["m32",{"_index":405,"t":{"101":{"position":[[729,3],[978,3]]}}}],["m[key",{"_index":1131,"t":{"285":{"position":[[352,6],[683,7]]}}}],["machin",{"_index":1692,"t":{"343":{"position":[[47,7]]}}}],["magnitud",{"_index":1840,"t":{
"361":{"position":[[77,9]]}}}],["main",{"_index":178,"t":{"27":{"position":[[503,31]]},"126":{"position":[[769,6]]},"195":{"position":[[50,6]]},"201":{"position":[[50,6]]},"207":{"position":[[50,6]]},"277":{"position":[[1506,6]]},"298":{"position":[[1733,6]]},"300":{"position":[[2284,7]]},"302":{"position":[[2708,6]]},"305":{"position":[[1704,6]]},"307":{"position":[[1369,6]]},"309":{"position":[[658,6]]}}}],["main(struct",{"_index":233,"t":{"27":{"position":[[1838,11]]}}}],["make",{"_index":261,"t":{"29":{"position":[[70,4],[82,4]]},"91":{"position":[[338,7]]},"95":{"position":[[144,7]]},"101":{"position":[[147,19]]}}}],["make_heap",{"_index":1008,"t":{"275":{"position":[[2296,9]]}}}],["malloc(sizeof(arcnod",{"_index":1385,"t":{"302":{"position":[[1670,26]]}}}],["map",{"_index":671,"t":{"211":{"position":[[173,4]]},"229":{"position":[[9,5]]},"251":{"position":[[99,3]]},"285":{"position":[[0,7],[223,68],[292,13],[326,9],[339,9]]},"334":{"position":[[82,3]]},"340":{"position":[[43,21]]}}}],["map、<=、>=、==、!=,其规则是先比较first,first相等时再比较second",{"_index":1124,"t":{"283":{"position":[[246,110]]}}}],["pair也能进行sort",{"_index":696,"t":{"215":{"position":[[81,12]]}}}],["pair对象外,如果需要即时生成一个pair对象,也可以调用在其中定义的一个模版函数:make_pair。make_pair",{"_index":1125,"t":{"283":{"position":[[357,92]]}}}],["pair模版类需要两个参数:首元素的数据类型和尾元素的数据类型。pair模版类对象有两个成员:first和second",{"_index":1123,"t":{"283":{"position":[[173,72]]}}}],["paramet",{"_index":1718,"t":{"347":{"position":[[37,9]]},"361":{"position":[[136,9]]}}}],["partial_sort",{"_index":992,"t":{"275":{"position":[[1822,12]]}}}],["partial_sort_copi",{"_index":993,"t":{"275":{"position":[[1840,17]]}}}],["partit",{"_index":985,"t":{"275":{"position":[[1590,9]]},"343":{"position":[[183,11]]}}}],["partition_copyc++11",{"_index":986,"t":{"275":{"position":[[1611,19]]}}}],["partition_pointc++11",{"_index":987,"t":{"275":{"position":[[1643,20]]}}}],["path",{"_index":1406,"t":{"302":{"position":[[2397,11]]}}}],["path/terminal_proxy.sh",{"_index":2106,"t":{"432":{"position":[[96,23]]}}}],["path[k]=0",{"_index":1418,"t":{"302":{"position":[[2691,10]]}}}],["path[k]=u",{"_index":1402,"t":{"302":{"position":[[2317,10]]}}}],["path[max_vertex_num",{"_index":1395,"t":{"302":{"position":[[2162,21]]}}}],["pathnum",{"_index":1410,"t":{"302":{"position":[[2483,10]]}}}],["pathnum=0",{"_index":1396,"t":{"302":{"position":[[2193,10],[3006,10]]}}}],["pathnum==0",{"_index":1442,"t":{"302":{"position":[[3096,12]]}}}],["paths[i",{"_index":1448,"t":{"302":{"position":[[3187,10]]}}}],["paths[maxsize][max_vertex_num",{"_index":1394,"t":{"302":{"position":[[2119,31]]}}}],["paths[pathnum][i",{"_index":1407,"t":{"302":{"position":[[2409,17]]}}}],["paths[pathnum][i]='\\0",{"_index":1409,"t":{"302":{"position":[[2453,23]]}}}],["pc",{"_index":364,"t":{"91":{"position":[[102,7]]}}}],["penalti",{"_index":1880,"t":{"376":{"position":[[151,48]]}}}],["perform",{"_index":1695,"t":{"343":{"position":[[80,11]]},"345":{"position":[[354,12]]}}}],["permit",{"_index":205,"t":{"27":{"position":[[918,9]]}}}],["pg",{"_index":1356,"t":{"302":{"position":[[759,4],[779,2],[795,2],[844,2],[1087,4],[1274,2],[1336,2],[1399,2],[1470,2],[1748,3],[1790,2],[1872,2]]}}}],["pi",{"_index":807,"t":{"255":{"position":[[445,2]]}}}],["pii",{"_index":806,"t":{"255":{"position":[[412,3]]}}}],["pip",{"_index":2072,"t":{"419":{"position":[[0,3]]}}}],["plasmpkg2",{"_index":2139,"t":{"439":{"position":[[336,9]]}}}],["platform",{"_index":409,"t":{"101":{"position":[[810,9],[841,9]]}}}],["plt",{"_index":1928,"t":{"392":{"position":[[158,3]]}}}],["plt
.show",{"_index":2032,"t":{"392":{"position":[[2628,10]]}}}],["po",{"_index":1196,"t":{"295":{"position":[[487,4],[511,3],[692,3],[725,3],[794,3],[932,3]]}}}],["point",{"_index":185,"t":{"27":{"position":[[660,5],[1743,6]]}}}],["polynomi",{"_index":612,"t":{"182":{"position":[[752,10]]},"187":{"position":[[752,10]]}}}],["pop",{"_index":665,"t":{"211":{"position":[[81,5],[122,5],[151,5]]},"298":{"position":[[1650,4]]}}}],["pop_heap",{"_index":1009,"t":{"275":{"position":[[2318,8]]}}}],["postt(int",{"_index":1501,"t":{"305":{"position":[[1595,9]]},"307":{"position":[[1260,9]]},"309":{"position":[[544,9]]}}}],["postt(rt",{"_index":1515,"t":{"305":{"position":[[2056,10]]},"307":{"position":[[1642,10]]},"309":{"position":[[806,10]]}}}],["postt(t[rt].l",{"_index":1502,"t":{"305":{"position":[[1629,15]]},"307":{"position":[[1294,15]]},"309":{"position":[[585,15]]}}}],["postt(t[rt].r",{"_index":1503,"t":{"305":{"position":[[1645,15]]},"307":{"position":[[1310,15]]},"309":{"position":[[601,15]]}}}],["pos处的二进制位置为0",{"_index":1209,"t":{"295":{"position":[[863,14]]}}}],["pp_arr",{"_index":898,"t":{"267":{"position":[[63,7]]}}}],["pre",{"_index":1530,"t":{"309":{"position":[[346,3]]}}}],["pre[maxn",{"_index":1522,"t":{"309":{"position":[[244,10]]}}}],["predic",{"_index":984,"t":{"275":{"position":[[1562,27]]}}}],["predict",{"_index":1640,"t":{"327":{"position":[[1533,7],[1580,7],[1768,7]]}}}],["press",{"_index":253,"t":{"27":{"position":[[2184,7]]}}}],["pret(int",{"_index":1493,"t":{"305":{"position":[[1365,8]]},"307":{"position":[[1046,8]]}}}],["pret(rt",{"_index":1513,"t":{"305":{"position":[[2022,9]]},"307":{"position":[[1629,9]]}}}],["pret(t[rt].l",{"_index":1496,"t":{"305":{"position":[[1435,14]]},"307":{"position":[[1116,14]]}}}],["pret(t[rt].r",{"_index":1497,"t":{"305":{"position":[[1450,14]]},"307":{"position":[[1131,14]]}}}],["prev_permut",{"_index":1021,"t":{"275":{"position":[[2667,16]]}}}],["prim",{"_index":596,"t":{"173":{"position":[[100,6]]}}}],["primari",{"_index":2159,"t":{"450":{"position":[[276,7],[310,7],[349,7],[389,7]]}}}],["print",{"_index":255,"t":{"27":{"position":[[2201,5]]}}}],["print(\"%c\",(asciicode=='\\r",{"_index":177,"t":{"27":{"position":[[443,28],[1658,28]]}}}],["print(\"\\n",{"_index":173,"t":{"27":{"position":[[376,9],[1588,9]]}}}],["print(\"argmax",{"_index":126,"t":{"21":{"position":[[885,13]]},"23":{"position":[[875,13]]},"405":{"position":[[871,13]]}}}],["print(\"to",{"_index":156,"t":{"27":{"position":[[99,9],[1311,9]]}}}],["print(\"welcom",{"_index":248,"t":{"27":{"position":[[2075,14]]}}}],["print(a",{"_index":106,"t":{"21":{"position":[[557,7]]},"23":{"position":[[547,7]]},"405":{"position":[[543,7]]}}}],["print(b",{"_index":109,"t":{"21":{"position":[[595,7]]},"23":{"position":[[585,7]]},"405":{"position":[[581,7]]}}}],["print(c.shap",{"_index":91,"t":{"21":{"position":[[266,14]]},"23":{"position":[[256,14]]},"405":{"position":[[252,14]]}}}],["print(d.shap",{"_index":98,"t":{"21":{"position":[[387,14]]},"23":{"position":[[377,14]]},"405":{"position":[[373,14]]}}}],["print(f'loss",{"_index":2024,"t":{"392":{"position":[[2464,12]]}}}],["print(f'{metric[2",{"_index":2028,"t":{"392":{"position":[[2545,18]]}}}],["print(i",{"_index":138,"t":{"21":{"position":[[1139,8],[1333,8]]},"23":{"position":[[1129,8],[1323,8]]},"405":{"position":[[1121,8],[1315,8]]}}}],["printf(\"%c",{"_index":1336,"t":{"302":{"position":[[312,12]]}}}],["printf(\"%d%c\",v[i],i==len",{"_index":1491,"t":{"305":{"position":[[1298,25]]},"307":{"position":[[1000,25]]}}}],["printf(\"%d\\n",{"_inde
x":634,"t":{"201":{"position":[[430,14]]}}}],["printf(\"%d\\n\",t[rt].d",{"_index":1543,"t":{"309":{"position":[[628,23]]}}}],["printf(\"7.28",{"_index":1441,"t":{"302":{"position":[[3017,12]]}}}],["printf(\"\\t",{"_index":1443,"t":{"302":{"position":[[3111,10]]}}}],["printf(\"\\t%d",{"_index":1445,"t":{"302":{"position":[[3162,12]]}}}],["printf(first?first=0,\"%d",{"_index":1494,"t":{"305":{"position":[[1398,27],[1528,27],[1661,27]]},"307":{"position":[[1079,27],[1201,27],[1326,27]]}}}],["priority_queue,greater的元素对序列。序列中的元素以const",{"_index":1127,"t":{"285":{"position":[[68,21]]}}}],["t[maxn",{"_index":1454,"t":{"305":{"position":[[208,9]]},"307":{"position":[[208,9]]},"309":{"position":[[290,9]]}}}],["t[rt].d=pre[rt",{"_index":1536,"t":{"309":{"position":[[437,16]]}}}],["t[rt].l=create(l1,p1",{"_index":1537,"t":{"309":{"position":[[454,20]]}}}],["t[rt].l=mid_po_build(la,p1",{"_index":1476,"t":{"305":{"position":[[824,26]]},"307":{"position":[[579,26]]}}}],["t[rt].l=mid_pr_build(la,p1",{"_index":1471,"t":{"305":{"position":[[481,26]]},"307":{"position":[[350,26]]}}}],["t[rt].r=create(p1+1,r1,l2+p2+1,r2",{"_index":1539,"t":{"309":{"position":[[490,35]]}}}],["t[rt].r=mid_po_build(p1+1,ra,lb+p2,rb",{"_index":1478,"t":{"305":{"position":[[885,37]]},"307":{"position":[[621,37]]}}}],["t[rt].r=mid_pr_build(p1+1,ra,lb+p2+1,rb",{"_index":1473,"t":{"305":{"position":[[542,41]]},"307":{"position":[[392,41]]}}}],["tag{10",{"_index":1629,"t":{"327":{"position":[[1304,8]]}}}],["tag{1}",{"_index":312,"t":{"40":{"position":[[144,12]]}}}],["tag{1}3×3×3×4=108(1",{"_index":1680,"t":{"336":{"position":[[168,21]]}}}],["tag{1}acc=len(y)∑i(predi​==yi​)​(1",{"_index":1868,"t":{"372":{"position":[[196,36]]}}}],["tag{1}f(x)={0x​x<0x≥0​(1",{"_index":34,"t":{"2":{"position":[[515,26]]},"4":{"position":[[507,26]]},"15":{"position":[[515,26]]},"17":{"position":[[507,26]]}}}],["tag{1}h′=nh+n−1​(1",{"_index":450,"t":{"110":{"position":[[306,20]]}}}],["tag{1}l(y,z)=max(0,−y∗z)(1",{"_index":2046,"t":{"400":{"position":[[406,28]]}}}],["tag{1}lossmse​=∑[y−f(x)]2(1",{"_index":52,"t":{"6":{"position":[[136,29]]},"19":{"position":[[136,29]]}}}],["tag{1}min",{"_index":1892,"t":{"378":{"position":[[113,10]]}}}],["tag{1}s(yi​)=∑jn​eyjeyi​​(1",{"_index":44,"t":{"2":{"position":[[887,29]]},"4":{"position":[[879,29]]},"15":{"position":[[887,29]]},"17":{"position":[[879,29]]}}}],["tag{1}shapeoutput​=strideshapeinput​−sizekernel​+2∗padding​+1(1",{"_index":1666,"t":{"332":{"position":[[328,65]]}}}],["tag{1}yi,j​=h,w∑​wi,j,h,w​∗xh,w​(1",{"_index":1551,"t":{"314":{"position":[[142,36]]}}}],["tag{1}σ(x)=1+e−x1​(1",{"_index":7,"t":{"2":{"position":[[78,22]]},"4":{"position":[[70,22]]},"15":{"position":[[78,22]]},"17":{"position":[[70,22]]},"320":{"position":[[47,22]]}}}],["tag{2}",{"_index":317,"t":{"42":{"position":[[59,12]]}}}],["tag{2}3×3××3=27(2",{"_index":1684,"t":{"338":{"position":[[100,19]]}}}],["tag{2}dxd",{"_index":1570,"t":{"320":{"position":[[145,14]]}}}],["tag{2}dxdf(x)​={01​x<0x≥0​(2",{"_index":38,"t":{"2":{"position":[[651,30]]},"4":{"position":[[643,30]]},"15":{"position":[[651,30]]},"17":{"position":[[643,30]]}}}],["tag{2}dxdσ​=σ(1−σ)(2",{"_index":13,"t":{"2":{"position":[[169,22]]},"4":{"position":[[161,22]]},"15":{"position":[[169,22]]},"17":{"position":[[161,22]]}}}],["tag{2}l(w,b)+2λ​∥w∥12​(2",{"_index":1899,"t":{"380":{"position":[[110,26]]}}}],["tag{2}yi,j​=h,w∑​wi,j,h,w​∗xh,w​=a,b∑​vi,j,a,b​∗xi+a,j+b​(2",{"_index":1554,"t":{"314":{"position":[[364,61]]}}}],["tag{2}∥y−f(x)∥2​=2∑[y−f(x)]2​(2",{"_index":58,"t":{"6":{"pos
ition":[[244,33]]},"19":{"position":[[244,33]]}}}],["tag{3}1×1×3×4=12(3",{"_index":1688,"t":{"340":{"position":[[112,20]]}}}],["tag{3}a",{"_index":324,"t":{"44":{"position":[[210,11]]}}}],["tag{3}f(x)={0x​x<0x≥0​(3",{"_index":1573,"t":{"322":{"position":[[77,26]]}}}],["tag{3}yi,j​=a,b∑​vi,j,a,b​∗xi+a,j+b​=a,b∑​va,b​∗xi+a,j+b​(3",{"_index":1557,"t":{"314":{"position":[[636,61]]}}}],["tag{3}∂w∂​(l(w,b)+2λ​∥w∥12​)=∂w∂l(w,b)​+λw(3",{"_index":1904,"t":{"380":{"position":[[347,46]]}}}],["tag{4}dxdf(x)​={01​x<0x≥0​(4",{"_index":1575,"t":{"322":{"position":[[213,30]]}}}],["tag{4}wt+1​=(1−ηλ)wt​+η∂wt​∂l(wt​,bt​)​(4",{"_index":1910,"t":{"380":{"position":[[501,43]]}}}],["tag{4}yi,j​=a,b∑​va,b​∗xi+a,j+b​=a=−δ∑δ​b=−δ∑δ​va,b​∗xia​,j+b​(4",{"_index":1563,"t":{"316":{"position":[[194,66]]}}}],["tag{5}lossmse​=∑[y−f(x)]2(5",{"_index":1577,"t":{"325":{"position":[[112,29]]}}}],["tag{5}shapeoutput​=strideshapeinput​−sizekernel​+2∗padding​+1(5",{"_index":1567,"t":{"316":{"position":[[398,65]]}}}],["tag{6}∥y−f(x)∥2​=2∑[y−f(x)]2​(6",{"_index":1579,"t":{"325":{"position":[[220,33]]}}}],["tag{7",{"_index":1596,"t":{"327":{"position":[[403,7]]}}}],["taint",{"_index":1826,"t":{"359":{"position":[[356,7]]}}}],["target",{"_index":397,"t":{"101":{"position":[[635,6],[828,6],[938,6],[984,6]]}}}],["target_cc",{"_index":403,"t":{"101":{"position":[[692,9]]}}}],["target_cc_prefix)gcc",{"_index":404,"t":{"101":{"position":[[705,22]]}}}],["target_cc_prefix)ld",{"_index":425,"t":{"101":{"position":[[1063,21]]}}}],["target_ld",{"_index":424,"t":{"101":{"position":[[1050,9]]}}}],["tcp/ip",{"_index":2170,"t":{"456":{"position":[[34,13],[48,18]]}}}],["tcp和udp",{"_index":2172,"t":{"456":{"position":[[148,12]]}}}],["techniqu",{"_index":1691,"t":{"343":{"position":[[29,9]]}}}],["temp",{"_index":473,"t":{"124":{"position":[[158,4],[210,4],[253,13],[270,5],[377,4],[516,5]]},"126":{"position":[[244,4],[296,4],[339,13],[356,5],[463,4],[602,5]]}}}],["tensor(0.2684",{"_index":1646,"t":{"327":{"position":[[1695,14],[1881,14]]}}}],["tensor(0.2684)10​11",{"_index":1779,"t":{"355":{"position":[[1073,20]]}}}],["tensor(0.2684)python",{"_index":1785,"t":{"355":{"position":[[1268,20]]}}}],["tensor(1",{"_index":139,"t":{"21":{"position":[[1152,11]]},"23":{"position":[[1142,11]]},"405":{"position":[[1134,11]]}}}],["tensor(2",{"_index":141,"t":{"21":{"position":[[1175,11]]},"23":{"position":[[1165,11]]},"405":{"position":[[1157,11]]}}}],["tensor(3",{"_index":143,"t":{"21":{"position":[[1198,11]]},"23":{"position":[[1188,11]]},"405":{"position":[[1180,11]]}}}],["tensor(4",{"_index":140,"t":{"21":{"position":[[1164,10]]},"23":{"position":[[1154,10]]},"405":{"position":[[1146,10]]}}}],["tensor(5",{"_index":142,"t":{"21":{"position":[[1187,10]]},"23":{"position":[[1177,10]]},"405":{"position":[[1169,10]]}}}],["tensor(6",{"_index":144,"t":{"21":{"position":[[1210,10]]},"23":{"position":[[1200,10]]},"405":{"position":[[1192,10]]}}}],["tensor([1",{"_index":130,"t":{"21":{"position":[[961,10],[979,10],[1346,11]]},"23":{"position":[[951,10],[969,10],[1336,11]]},"405":{"position":[[947,10],[965,10],[1328,11]]}}}],["tensor([3",{"_index":148,"t":{"21":{"position":[[1385,11]]},"23":{"position":[[1375,11]]},"405":{"position":[[1367,11]]}}}],["tensor([4",{"_index":147,"t":{"21":{"position":[[1366,10]]},"23":{"position":[[1356,10]]},"405":{"position":[[1348,10]]}}}],["tensor([6",{"_index":149,"t":{"21":{"position":[[1405,10]]},"23":{"position":[[1395,10]]},"405":{"position":[[1387,10]]}}}],["tensor([[9",{"_index":107,"t":{"21":{"position":[[572,11]]},"23":{"positi
on":[[562,11]]},"405":{"position":[[558,11]]}}}],["tensorflow框架,可以使用tensorboard",{"_index":2070,"t":{"417":{"position":[[3,36]]}}}],["tensor的*乘法是对tensor",{"_index":103,"t":{"21":{"position":[[468,28]]},"23":{"position":[[458,28]]},"405":{"position":[[454,28]]}}}],["terminal_proxy.sh",{"_index":2092,"t":{"430":{"position":[[0,23]]}}}],["test",{"_index":842,"t":{"260":{"position":[[29,4],[101,5],[232,4]]},"343":{"position":[[224,4],[277,7]]},"345":{"position":[[82,4],[240,4]]},"347":{"position":[[95,4],[115,5]]},"392":{"position":[[1771,5],[2519,4]]}}}],["test.first",{"_index":845,"t":{"260":{"position":[[107,10]]}}}],["test.four",{"_index":851,"t":{"260":{"position":[[176,9]]}}}],["test.second",{"_index":846,"t":{"260":{"position":[[125,11]]}}}],["test.third",{"_index":848,"t":{"260":{"position":[[149,10]]}}}],["test_acc",{"_index":2023,"t":{"392":{"position":[[2367,8],[2453,10]]}}}],["test_acc:.3f",{"_index":2027,"t":{"392":{"position":[[2528,16]]}}}],["test_it",{"_index":1982,"t":{"392":{"position":[[1391,10],[2405,10],[2668,9],[2787,10]]}}}],["text",{"_index":36,"t":{"2":{"position":[[568,7]]},"4":{"position":[[560,7]]},"15":{"position":[[568,7]]},"17":{"position":[[560,7]]},"322":{"position":[[130,7]]},"439":{"position":[[80,4]]}}}],["text{subject",{"_index":1887,"t":{"378":{"position":[[58,13]]}}}],["theta",{"_index":1891,"t":{"378":{"position":[[106,6],[192,23]]}}}],["third",{"_index":839,"t":{"258":{"position":[[52,6]]}}}],["third:\"method",{"_index":854,"t":{"260":{"position":[[258,13]]}}}],["thread",{"_index":181,"t":{"27":{"position":[[556,8],[565,6],[2169,6],[2238,8],[2247,6],[2324,6]]}}}],["three",{"_index":855,"t":{"260":{"position":[[272,7]]}}}],["time",{"_index":524,"t":{"136":{"position":[[143,6]]},"141":{"position":[[166,6]]},"143":{"position":[[68,6]]},"153":{"position":[[134,6]]},"155":{"position":[[70,6],[89,6],[115,6]]},"330":{"position":[[67,6],[98,6]]},"332":{"position":[[13,6],[22,6],[55,6],[66,6],[77,6],[115,6],[148,6],[158,6],[424,6],[435,6],[444,6],[453,6],[463,6]]},"334":{"position":[[93,6]]},"336":{"position":[[135,6],[144,6],[153,6]]},"338":{"position":[[70,6],[79,6],[86,6]]},"340":{"position":[[7,6],[80,6],[89,6],[98,6]]},"343":{"position":[[243,6],[290,5]]},"345":{"position":[[187,6]]}}}],["time.h",{"_index":767,"t":{"249":{"position":[[227,6]]}}}],["timer",{"_index":2000,"t":{"392":{"position":[[1784,6]]}}}],["timer.start",{"_index":2009,"t":{"392":{"position":[[1950,13]]}}}],["timer.stop",{"_index":2018,"t":{"392":{"position":[[2142,12]]}}}],["timer.sum():.1f",{"_index":2029,"t":{"392":{"position":[[2579,16]]}}}],["titile(if",{"_index":2121,"t":{"439":{"position":[[66,9]]}}}],["titl",{"_index":2115,"t":{"437":{"position":[[321,7]]},"439":{"position":[[100,5]]}}}],["title/window",{"_index":2120,"t":{"439":{"position":[[53,12]]}}}],["title文字不能垂直居中,可以更换为window",{"_index":2114,"t":{"437":{"position":[[295,25]]}}}],["tmp",{"_index":1376,"t":{"302":{"position":[[1435,5],[1545,5],[2882,5]]}}}],["tmp[0",{"_index":1379,"t":{"302":{"position":[[1555,7],[2915,8],[3051,7]]}}}],["tmp[2",{"_index":1381,"t":{"302":{"position":[[1567,7]]}}}],["tmp[20",{"_index":1434,"t":{"302":{"position":[[2832,8]]}}}],["tmp[3",{"_index":1437,"t":{"302":{"position":[[2944,8],[3059,8]]}}}],["tmp[max_vertex_num",{"_index":1369,"t":{"302":{"position":[[1156,20]]}}}],["toc",{"_index":741,"t":{"244":{"position":[[0,5]]}}}],["todo(\"start",{"_index":251,"t":{"27":{"position":[[2148,11]]}}}],["top",{"_index":667,"t":{"211":{"position":[[115,6],[144,6]]}}}],["topologicalsort",{"_index":1296,"t":{"
300":{"position":[[1758,30]]}}}],["torch",{"_index":20,"t":{"2":{"position":[[322,5],[726,5]]},"4":{"position":[[314,5],[718,5]]},"15":{"position":[[322,5],[726,5]]},"17":{"position":[[314,5],[718,5]]},"320":{"position":[[299,5]]},"322":{"position":[[288,5]]},"392":{"position":[[7,5],[37,5],[112,5]]}}}],["torch.concat((a",{"_index":95,"t":{"21":{"position":[[309,16]]},"23":{"position":[[299,16]]},"405":{"position":[[295,16]]}}}],["torch.linspac",{"_index":21,"t":{"2":{"position":[[332,15],[736,15]]},"4":{"position":[[324,15],[728,15]]},"15":{"position":[[332,15],[736,15]]},"17":{"position":[[324,15],[728,15]]},"320":{"position":[[309,15]]},"322":{"position":[[298,15]]}}}],["torch.log",{"_index":1635,"t":{"327":{"position":[[1444,9]]}}}],["torch.log(torch.softmax(predict",{"_index":1642,"t":{"327":{"position":[[1590,32]]},"355":{"position":[[968,32]]}}}],["torch.nn",{"_index":16,"t":{"2":{"position":[[283,8],[687,8]]},"4":{"position":[[275,8],[679,8]]},"15":{"position":[[283,8],[687,8]]},"17":{"position":[[275,8],[679,8]]},"320":{"position":[[260,8]]},"322":{"position":[[249,8]]},"327":{"position":[[1481,8]]},"355":{"position":[[857,8]]},"392":{"position":[[64,8]]}}}],["torch.nn.crossentropyloss",{"_index":1994,"t":{"392":{"position":[[1630,27]]}}}],["torch.nn.crossentropyloss相当于torch.softmax",{"_index":1634,"t":{"327":{"position":[[1400,41]]}}}],["torch.nn.modul",{"_index":1965,"t":{"392":{"position":[[971,17]]}}}],["torch.nn.nllloss",{"_index":1636,"t":{"327":{"position":[[1456,17]]}}}],["torch.nn.sequenti",{"_index":1943,"t":{"392":{"position":[[505,20]]}}}],["torch.optim.sgd(net.paramet",{"_index":1992,"t":{"392":{"position":[[1582,33]]}}}],["torch.rand(4",{"_index":82,"t":{"21":{"position":[[118,13],[157,13]]},"23":{"position":[[108,13],[147,13]]},"405":{"position":[[104,13],[143,13]]}}}],["torch.size([2",{"_index":92,"t":{"21":{"position":[[283,14]]},"23":{"position":[[273,14]]},"405":{"position":[[269,14]]}}}],["torch.size([4",{"_index":99,"t":{"21":{"position":[[404,14]]},"23":{"position":[[394,14]]},"405":{"position":[[390,14]]}}}],["torch.stack((a",{"_index":88,"t":{"21":{"position":[[196,15]]},"23":{"position":[[186,15]]},"405":{"position":[[182,15]]}}}],["torch.sum(y_hat.argmax(dim=1",{"_index":1979,"t":{"392":{"position":[[1328,29]]}}}],["torch.tensor([1",{"_index":134,"t":{"21":{"position":[[1061,16]]},"23":{"position":[[1051,16]]},"327":{"position":[[1640,16],[1823,16]]},"355":{"position":[[1018,16],[1208,16]]},"405":{"position":[[1043,16]]}}}],["torch.tensor([4",{"_index":135,"t":{"21":{"position":[[1089,16]]},"23":{"position":[[1079,16]]},"405":{"position":[[1071,16]]}}}],["torch.tensor([[0.1",{"_index":117,"t":{"21":{"position":[[791,19]]},"23":{"position":[[781,19]]},"405":{"position":[[777,19]]}}}],["torch.tensor([[1",{"_index":145,"t":{"21":{"position":[[1229,17]]},"23":{"position":[[1219,17]]},"405":{"position":[[1211,17]]}}}],["torch.tensor([[2",{"_index":1641,"t":{"327":{"position":[[1543,17],[1778,17]]},"355":{"position":[[921,17],[1162,17]]}}}],["torch.tensor([[3",{"_index":104,"t":{"21":{"position":[[501,17]]},"23":{"position":[[491,17]]},"405":{"position":[[487,17]]}}}],["torch.tensor([[4",{"_index":146,"t":{"21":{"position":[[1270,17]]},"23":{"position":[[1260,17]]},"405":{"position":[[1252,17]]}}}],["train",{"_index":1703,"t":{"343":{"position":[[214,5]]},"345":{"position":[[144,8]]},"347":{"position":[[0,5],[14,5]]},"392":{"position":[[1758,6],[2492,5]]}}}],["train(lenet",{"_index":2037,"t":{"392":{"position":[[2762,12]]}}}],["train(net",{"_index":1980,"t":{"39
2":{"position":[[1368,10]]}}}],["train_acc",{"_index":2021,"t":{"392":{"position":[[2187,9],[2349,10]]}}}],["train_acc:.3f",{"_index":2026,"t":{"392":{"position":[[2502,16]]}}}],["train_it",{"_index":1981,"t":{"392":{"position":[[1379,11],[2656,11],[2775,11]]}}}],["train_l",{"_index":2019,"t":{"392":{"position":[[2155,7],[2339,9]]}}}],["train_l:.3f",{"_index":2025,"t":{"392":{"position":[[2477,14]]}}}],["transform",{"_index":980,"t":{"275":{"position":[[1413,9]]}}}],["treasur",{"_index":459,"t":{"118":{"position":[[83,8]]}}}],["treat",{"_index":389,"t":{"101":{"position":[[336,7]]}}}],["trove",{"_index":460,"t":{"118":{"position":[[92,5]]}}}],["true",{"_index":881,"t":{"260":{"position":[[834,5]]},"287":{"position":[[231,11]]},"291":{"position":[[198,16]]},"298":{"position":[[698,5],[1559,5]]},"300":{"position":[[1047,5],[1108,6]]},"302":{"position":[[71,4]]}}}],["true;//加入拓扑排序的顶点为n",{"_index":1312,"t":{"300":{"position":[[2214,30]]}}}],["truth做bc",{"_index":515,"t":{"134":{"position":[[64,9]]}}}],["truth的bc",{"_index":518,"t":{"134":{"position":[[119,9]]}}}],["two",{"_index":850,"t":{"260":{"position":[[170,5]]}}}],["type(m",{"_index":1985,"t":{"392":{"position":[[1451,7],[1475,7]]}}}],["typedef",{"_index":857,"t":{"260":{"position":[[323,7],[540,7]]},"262":{"position":[[0,7]]},"298":{"position":[[126,7],[274,7]]},"300":{"position":[[198,7],[415,7]]},"302":{"position":[[165,7],[185,7],[357,7],[390,7],[489,7],[585,7]]},"305":{"position":[[100,7]]},"307":{"position":[[100,7]]},"309":{"position":[[186,7]]}}}],["u",{"_index":500,"t":{"132":{"position":[[0,4],[124,1]]},"246":{"position":[[198,1],[218,1],[239,1]]},"302":{"position":[[1992,2],[2264,3],[2310,6],[2512,9]]},"439":{"position":[[347,1]]}}}],["u,int",{"_index":1398,"t":{"302":{"position":[[2247,5]]}}}],["u==v",{"_index":1404,"t":{"302":{"position":[[2361,6]]}}}],["u=q.front();//取队首顶点u",{"_index":1302,"t":{"300":{"position":[[1926,20]]}}}],["udg",{"_index":1340,"t":{"302":{"position":[[374,4]]}}}],["ull",{"_index":804,"t":{"255":{"position":[[381,3],[588,3]]}}}],["uncertainti",{"_index":1588,"t":{"327":{"position":[[139,12]]}}}],["unchang",{"_index":1871,"t":{"372":{"position":[[322,9]]}}}],["uncom",{"_index":296,"t":{"29":{"position":[[851,9]]},"101":{"position":[[1907,9]]}}}],["undefin",{"_index":428,"t":{"101":{"position":[[1101,9]]}}}],["uniqu",{"_index":981,"t":{"275":{"position":[[1448,6],[1520,9]]}}}],["unique_copi",{"_index":982,"t":{"275":{"position":[[1491,11]]}}}],["unit",{"_index":28,"t":{"2":{"position":[[426,5]]},"4":{"position":[[418,5]]},"15":{"position":[[426,5]]},"17":{"position":[[418,5]]}}}],["unordered_map",{"_index":675,"t":{"211":{"position":[[236,14]]},"229":{"position":[[136,14]]}}}],["unordered_multimap",{"_index":677,"t":{"211":{"position":[[271,22]]}}}],["unordered_multimap的操作和set或者map等的操作基本一致,唯一的区别就是不支持类似lower_bound",{"_index":733,"t":{"229":{"position":[[171,69]]}}}],["unordered_multiset",{"_index":676,"t":{"211":{"position":[[251,19]]},"229":{"position":[[151,19]]}}}],["unordered_set",{"_index":674,"t":{"211":{"position":[[221,14]]},"229":{"position":[[121,14]]}}}],["unset",{"_index":2099,"t":{"430":{"position":[[222,5],[238,5],[255,5]]}}}],["unsign",{"_index":805,"t":{"255":{"position":[[385,8]]},"295":{"position":[[1001,8]]}}}],["until",{"_index":2052,"t":{"400":{"position":[[646,5]]}}}],["updat",{"_index":1719,"t":{"347":{"position":[[47,6]]}}}],["update='append",{"_index":2083,"t":{"424":{"position":[[261,16],[336,16]]},"426":{"position":[[227,16]]}}}],["upper_bound",{"_index":999,"t":{"275":{"position
":[[2014,11]]}}}],["us",{"_index":206,"t":{"27":{"position":[[931,4]]},"29":{"position":[[969,6]]},"101":{"position":[[760,4],[1206,4],[2025,6]]},"126":{"position":[[65,5]]},"195":{"position":[[25,5]]},"201":{"position":[[25,5]]},"207":{"position":[[25,5]]},"251":{"position":[[0,5]]},"253":{"position":[[250,5]]},"277":{"position":[[1481,5]]},"298":{"position":[[53,5]]},"300":{"position":[[119,5]]},"305":{"position":[[79,5]]},"307":{"position":[[79,5]]},"309":{"position":[[165,5]]},"343":{"position":[[39,4],[250,5]]},"345":{"position":[[70,4],[132,4],[215,4]]},"347":{"position":[[126,4]]}}}],["user",{"_index":934,"t":{"273":{"position":[[839,5]]}}}],["util",{"_index":1118,"t":{"283":{"position":[[0,11]]}}}],["v",{"_index":1392,"t":{"302":{"position":[[1995,8],[2612,2]]},"305":{"position":[[1118,2]]},"307":{"position":[[820,2]]}}}],["v,int",{"_index":1399,"t":{"302":{"position":[[2253,5]]}}}],["v.push_back(w",{"_index":1484,"t":{"305":{"position":[[1179,15]]},"307":{"position":[[881,15]]}}}],["v1",{"_index":528,"t":{"141":{"position":[[21,12]]}}}],["v=g.vertexs[u].connectors[i];//u的后继节点v",{"_index":1304,"t":{"300":{"position":[[2011,38]]}}}],["v_n",{"_index":311,"t":{"40":{"position":[[137,6]]},"44":{"position":[[171,4],[203,6]]}}}],["v_t",{"_index":309,"t":{"40":{"position":[[127,4]]},"42":{"position":[[53,5]]},"44":{"position":[[193,4]]}}}],["val",{"_index":1035,"t":{"277":{"position":[[677,4],[722,6],[1038,4]]},"279":{"position":[[497,4],[698,4],[743,6],[982,4]]}}}],["valid",{"_index":1690,"t":{"343":{"position":[[13,10],[139,10]]},"345":{"position":[[34,11]]},"347":{"position":[[54,10]]}}}],["valu",{"_index":1132,"t":{"285":{"position":[[361,6],[510,8]]}}}],["var",{"_index":2156,"t":{"450":{"position":[[260,4],[294,4],[333,4],[373,4]]}}}],["vc++6.0中指针初始化为0xcccccccc",{"_index":1363,"t":{"302":{"position":[[873,26]]}}}],["vc++6.0中指针初始化为0xcccccccc,如果不将指针初始化为null",{"_index":1372,"t":{"302":{"position":[[1221,45]]}}}],["vec",{"_index":486,"t":{"126":{"position":[[819,4]]}}}],["vec.empti",{"_index":491,"t":{"126":{"position":[[970,13]]}}}],["vec.push_back(remaind",{"_index":489,"t":{"126":{"position":[[903,25]]}}}],["vec.rbegin",{"_index":494,"t":{"126":{"position":[[1023,13]]}}}],["vec.rend",{"_index":495,"t":{"126":{"position":[[1043,11]]}}}],["vector",{"_index":659,"t":{"211":{"position":[[0,17]]},"213":{"position":[[94,19],[224,12]]},"251":{"position":[[68,6]]},"277":{"position":[[0,10],[149,81],[231,27],[433,12],[1301,33],[1338,55],[1472,8]]},"298":{"position":[[29,8]]},"300":{"position":[[50,8]]}}}],["vector::iter",{"_index":1278,"t":{"300":{"position":[[1177,24]]}}}],["vector为底层容器,堆heap",{"_index":921,"t":{"273":{"position":[[448,41]]}}}],["vector对象,存储的是int",{"_index":1023,"t":{"277":{"position":[[277,27]]}}}],["vector对象,并从由迭代器first和last定义的序列[first",{"_index":1028,"t":{"277":{"position":[[380,41]]}}}],["vector的s",{"_index":682,"t":{"213":{"position":[[61,18]]}}}],["ver",{"_index":1352,"t":{"302":{"position":[[644,5]]}}}],["veri",{"_index":302,"t":{"29":{"position":[[962,4]]},"101":{"position":[[2018,4]]}}}],["vernum",{"_index":1358,"t":{"302":{"position":[[798,7],[1339,7],[1456,8]]}}}],["vernum,arcnum",{"_index":1350,"t":{"302":{"position":[[621,14]]}}}],["vers[a].firstarc",{"_index":1389,"t":{"302":{"position":[[1752,18],[1793,18],[1875,17]]}}}],["vers[i].data=tmp[i",{"_index":1378,"t":{"302":{"position":[[1473,21]]}}}],["vers[i].firstarc",{"_index":1361,"t":{"302":{"position":[[847,17]]}}}],["version/src/project0/build目录下执行mak",{"_index":383,"t":{"101":{"position":[[43,35]]}}}],["versi
on/src/projecti/build",{"_index":369,"t":{"91":{"position":[[287,33]]},"95":{"position":[[114,29]]}}}],["version/src/projecti/build文件夹下进行,即要在终端中通过cd",{"_index":260,"t":{"29":{"position":[[17,49]]}}}],["version/src/projecti/build目录下创建.bochsrc",{"_index":439,"t":{"101":{"position":[[1379,41]]}}}],["version/src/projecti/build目录下的makefi",{"_index":396,"t":{"101":{"position":[[593,39],[1158,39]]}}}],["version/src/projecti/build目录下的makefie文件(由于每个project下都存在一个对应的makefil",{"_index":390,"t":{"101":{"position":[[369,71]]}}}],["version/src/projecti/src/geeko",{"_index":368,"t":{"91":{"position":[[232,37]]}}}],["version/src/projecti/src/geekos/main.c",{"_index":152,"t":{"27":{"position":[[9,40]]}}}],["version/src/目录下会存在project0",{"_index":365,"t":{"91":{"position":[[131,26]]}}}],["vert",{"_index":1888,"t":{"378":{"position":[[83,5]]},"380":{"position":[[92,5],[273,5]]}}}],["vert^2_1",{"_index":1889,"t":{"378":{"position":[[91,9]]},"380":{"position":[[100,9],[281,9]]}}}],["vert_2",{"_index":56,"t":{"6":{"position":[[205,7]]},"19":{"position":[[205,7]]},"325":{"position":[[181,7]]}}}],["vertex",{"_index":858,"t":{"260":{"position":[[338,6],[414,8],[516,7],[578,8]]},"298":{"position":[[141,6],[198,8],[250,7],[312,8]]},"300":{"position":[[213,6],[289,8],[391,7],[453,8]]}}}],["vertex(int",{"_index":863,"t":{"260":{"position":[[460,10]]},"298":{"position":[[219,10]]},"300":{"position":[[335,10]]}}}],["vertexs.resize(1",{"_index":877,"t":{"260":{"position":[[752,18]]},"300":{"position":[[627,18]]}}}],["vertexs.resize(n",{"_index":874,"t":{"260":{"position":[[697,18]]},"298":{"position":[[431,18]]},"300":{"position":[[572,18]]}}}],["vertexs.s",{"_index":1218,"t":{"298":{"position":[[516,16]]},"300":{"position":[[712,16]]}}}],["vertexs[id1].connectors.push_back(id2",{"_index":1219,"t":{"298":{"position":[[560,39],[609,39]]},"300":{"position":[[756,39],[856,39]]}}}],["vertexs[id1].indegre",{"_index":1270,"t":{"300":{"position":[[962,24]]}}}],["vertexs[id1].outdegre",{"_index":1268,"t":{"300":{"position":[[796,25],[936,25]]}}}],["vertexs[id2].connectors.push_back(id1",{"_index":1220,"t":{"298":{"position":[[649,39]]},"300":{"position":[[896,39]]}}}],["vertexs[id2].indegre",{"_index":1269,"t":{"300":{"position":[[822,24],[1013,24]]}}}],["vertexs[id2].outdegre",{"_index":1271,"t":{"300":{"position":[[987,25]]}}}],["vertexs[id].connectors.s",{"_index":1232,"t":{"298":{"position":[[920,30],[1353,30]]}}}],["vertexs[id].connectors[i",{"_index":1233,"t":{"298":{"position":[[968,26],[1401,26]]}}}],["vertextyp",{"_index":1332,"t":{"302":{"position":[[218,10],[511,10],[941,10],[2108,10]]}}}],["vertic",{"_index":2124,"t":{"439":{"position":[[124,11]]}}}],["vga_update_interv",{"_index":290,"t":{"29":{"position":[[749,20]]},"101":{"position":[[1805,20]]}}}],["vgaromimag",{"_index":272,"t":{"29":{"position":[[453,12]]},"101":{"position":[[1509,12]]}}}],["vi",{"_index":2076,"t":{"424":{"position":[[26,3]]},"426":{"position":[[26,3]]}}}],["vis.line([0",{"_index":2078,"t":{"424":{"position":[[116,14]]}}}],["vis.line([[0",{"_index":2086,"t":{"426":{"position":[[41,14]]}}}],["vis.line([loss.item",{"_index":2084,"t":{"424":{"position":[[289,23]]}}}],["visdom",{"_index":2074,"t":{"419":{"position":[[12,6]]},"422":{"position":[[0,28],[37,6]]},"424":{"position":[[5,6],[19,6],[32,8]]},"426":{"position":[[5,6],[19,6],[32,8]]}}}],["visdom.serv",{"_index":2075,"t":{"422":{"position":[[54,13]]}}}],["visit",{"_index":1223,"t":{"298":{"position":[[753,8],[1132,8]]}}}],["visit(vertextyp",{"_index":1335,"t":{"302":{"position":[[290,16]]}}}],["v
isit[cnt]=0",{"_index":1440,"t":{"302":{"position":[[2992,13]]}}}],["visit[max_vertex_num",{"_index":1393,"t":{"302":{"position":[[2077,22]]}}}],["visit[p",{"_index":1413,"t":{"302":{"position":[[2568,8]]}}}],["visit[u]=0",{"_index":1417,"t":{"302":{"position":[[2679,11]]}}}],["visit[u]=1",{"_index":1401,"t":{"302":{"position":[[2298,11]]}}}],["visited.count(id1",{"_index":1234,"t":{"298":{"position":[[998,19],[1431,19]]}}}],["visited.insert(id1",{"_index":1236,"t":{"298":{"position":[[1044,20],[1497,20]]}}}],["visited.insert(start",{"_index":1227,"t":{"298":{"position":[[802,22],[1216,22]]}}}],["viz.line([[y1",{"_index":2090,"t":{"426":{"position":[[177,14]]}}}],["viz.line([real_y_data",{"_index":2081,"t":{"424":{"position":[[208,23]]}}}],["vnode",{"_index":1346,"t":{"302":{"position":[[504,6],[547,7]]}}}],["vnv_nvn",{"_index":346,"t":{"76":{"position":[[68,21]]}}}],["voc中,类别种类为20类,因此在预测阶段输出的[7",{"_index":546,"t":{"141":{"position":[[422,27]]}}}],["void",{"_index":154,"t":{"27":{"position":[[82,4],[1294,4],[1833,4]]},"300":{"position":[[1073,4]]},"302":{"position":[[285,4],[2215,4]]},"305":{"position":[[1073,4],[1360,4],[1475,4],[1590,4]]},"307":{"position":[[775,4],[1041,4],[1148,4],[1255,4]]},"309":{"position":[[539,4]]}}}],["vrtype",{"_index":1333,"t":{"302":{"position":[[248,6],[433,6]]}}}],["vtv_tvt",{"_index":345,"t":{"76":{"position":[[47,20]]}}}],["vt​∪vn​)∗(1",{"_index":313,"t":{"40":{"position":[[157,15]]}}}],["vt​∪vn​)∗(3",{"_index":325,"t":{"44":{"position":[[229,15]]}}}],["vt∗(2)",{"_index":314,"t":{"42":{"position":[[6,9]]}}}],["vt∗​(2",{"_index":318,"t":{"42":{"position":[[72,9]]}}}],["vt∪vn)∗(1)",{"_index":306,"t":{"40":{"position":[[73,14]]}}}],["vt∪vn)∗(3)\\alpha",{"_index":322,"t":{"44":{"position":[[115,19]]}}}],["v当前是第k",{"_index":1400,"t":{"302":{"position":[[2268,10]]}}}],["v的入度减1",{"_index":1306,"t":{"300":{"position":[[2067,9]]}}}],["w",{"_index":541,"t":{"141":{"position":[[307,2]]},"332":{"position":[[451,1]]},"353":{"position":[[83,17]]},"355":{"position":[[177,4],[410,1],[778,4],[1359,3],[1395,2],[1401,3],[1604,3],[1644,2],[1650,3],[1743,1],[1787,1],[2032,3],[2072,2],[2078,3],[2116,2]]},"357":{"position":[[129,4],[221,2],[245,2],[356,5]]},"359":{"position":[[115,23],[152,33]]},"361":{"position":[[66,1],[98,1]]},"378":{"position":[[89,1]]},"380":{"position":[[98,1],[237,2],[279,1],[332,2],[345,1]]},"400":{"position":[[564,1],[608,1],[612,1]]}}}],["w')o(co​×ci​×h×w×h′×w",{"_index":1668,"t":{"332":{"position":[[470,23]]}}}],["w'co​×h′×w",{"_index":1663,"t":{"332":{"position":[[165,11]]}}}],["w(l(w,b)+λ2∥w∥12)=∂l(w,b)∂w+λw(3)\\frac{\\partial}{\\parti",{"_index":1900,"t":{"380":{"position":[[177,59]]}}}],["w<0",{"_index":1757,"t":{"355":{"position":[[516,3],[1824,3]]},"357":{"position":[[262,3]]}}}],["w=q.front",{"_index":1483,"t":{"305":{"position":[[1157,12]]},"307":{"position":[[859,12]]}}}],["w>0",{"_index":1756,"t":{"355":{"position":[[504,3],[1780,3]]},"357":{"position":[[238,3]]}}}],["w][b,1,h,w]的tensor",{"_index":572,"t":{"149":{"position":[[196,46]]}}}],["w][b,1,h,w]的tensor,再将二者concat后通过7×77",{"_index":576,"t":{"153":{"position":[[97,36]]}}}],["w][b,c,h,w]分别经过最大池化和平均池化来压缩空间维度、学习通道之间的特征,得到[b,c,1,1][b",{"_index":573,"t":{"151":{"position":[[24,56]]}}}],["w][b,c,h,w]分别经过最大池化和平均池化(通过torch.max和torch.mean函数实现)得到[b,1,h,w][b",{"_index":575,"t":{"153":{"position":[[24,66]]}}}],["w][b,c,h,w]的特征图通过池化挤压宽高维度,得到[b,c,1,1][b",{"_index":587,"t":{"159":{"position":[[56,40]]}}}],["w][b,c,h,w]经过空间注意力机制算法得到[b,1,h,w][b",{"_index":571,"t":{"149":{"position":[[153,36]]}}}],["w][b,
c,h,w]经过通道注意力机制算法得到[b,c,1,1][b",{"_index":569,"t":{"149":{"position":[[30,36]]}}}],["w^{2}l2​=(wx+b−y)2+λw2",{"_index":1762,"t":{"355":{"position":[[640,22]]}}}],["w_t",{"_index":1909,"t":{"380":{"position":[[496,4]]}}}],["w_{\\text",{"_index":1788,"t":{"355":{"position":[[1342,8],[1587,8],[2015,8]]}}}],["wall",{"_index":435,"t":{"101":{"position":[[1266,4],[1315,4]]}}}],["warn",{"_index":387,"t":{"101":{"position":[[321,8]]}}}],["wci​×h×w",{"_index":1656,"t":{"332":{"position":[[29,8]]}}}],["weight",{"_index":1344,"t":{"302":{"position":[[440,7]]},"353":{"position":[[101,8]]},"372":{"position":[[336,7]]},"450":{"position":[[420,7]]}}}],["welcom",{"_index":2150,"t":{"450":{"position":[[101,8]]}}}],["werror",{"_index":394,"t":{"101":{"position":[[507,6]]}}}],["while(!q.empti",{"_index":1301,"t":{"300":{"position":[[1903,18]]},"305":{"position":[[1133,17]]},"307":{"position":[[835,17]]}}}],["while(1",{"_index":162,"t":{"27":{"position":[[150,8],[1362,8]]}}}],["while(cin",{"_index":1051,"t":{"277":{"position":[[1537,9]]}}}],["while(g.s",{"_index":1228,"t":{"298":{"position":[[825,14],[1273,14]]}}}],["while(in[p1]!=pre[rt",{"_index":1534,"t":{"309":{"position":[[398,22]]}}}],["while(it!=g.vertexs.end",{"_index":1280,"t":{"300":{"position":[[1226,27]]}}}],["while(m",{"_index":1275,"t":{"300":{"position":[[1128,7]]}}}],["while(mid[p1]!=rt",{"_index":1467,"t":{"305":{"position":[[431,18],[774,18]]},"307":{"position":[[315,18],[544,18]]}}}],["while(~scanf(\"%d\",&n",{"_index":1504,"t":{"305":{"position":[[1720,22]]},"307":{"position":[[1385,22]]}}}],["white",{"_index":464,"t":{"118":{"position":[[151,5]]}}}],["win='win_id",{"_index":2079,"t":{"424":{"position":[[137,13],[247,13],[322,13]]},"426":{"position":[[68,13],[213,13]]}}}],["window",{"_index":2141,"t":{"442":{"position":[[0,20]]}}}],["wise",{"_index":522,"t":{"136":{"position":[[73,7]]}}}],["wise)以及逐点(point",{"_index":521,"t":{"136":{"position":[[57,15]]}}}],["wnew",{"_index":1764,"t":{"355":{"position":[[736,4],[1296,4],[1488,4],[1964,4]]},"357":{"position":[[106,4],[150,4],[327,4]]}}}],["work",{"_index":402,"t":{"101":{"position":[[681,5]]}}}],["work.109",{"_index":423,"t":{"101":{"position":[[1040,9]]}}}],["write",{"_index":297,"t":{"29":{"position":[[869,5]]},"101":{"position":[[1925,5]]}}}],["wt+1=(1−ηλ)wt+η∂l(wt,bt)∂wt(4)w_{t+1}=(1",{"_index":1905,"t":{"380":{"position":[[394,40]]}}}],["wwnew",{"_index":1821,"t":{"357":{"position":[[375,5]]}}}],["www和偏置项bbb",{"_index":2048,"t":{"400":{"position":[[458,39]]}}}],["w}=\\left\\{\\begin{array}{l",{"_index":1755,"t":{"355":{"position":[[472,27]]}}}],["w}\\end{aligned}wnew",{"_index":1769,"t":{"355":{"position":[[815,19]]}}}],["w}\\right",{"_index":1799,"t":{"355":{"position":[[1705,9]]}}}],["w−h",{"_index":1814,"t":{"357":{"position":[[140,5]]}}}],["w−h)−2λw",{"_index":1822,"t":{"357":{"position":[[381,11]]}}}],["w−h)−2λww_{\\text",{"_index":1820,"t":{"357":{"position":[[332,18]]}}}],["w−h)−λ,(w−h)+λ,​w>0w<0",{"_index":1819,"t":{"357":{"position":[[289,27]]}}}],["w−h)−λ,w>0(w−h)+λ,w<0w_{\\text",{"_index":1815,"t":{"357":{"position":[[155,32]]}}}],["w−hw_{\\text",{"_index":1812,"t":{"357":{"position":[[111,12]]}}}],["w−η∂l1∂w=w−η⋅[2x(wx+b−y)+λd∣w∣dw]={w−η⋅[2x(wx+b−y)+λ]w>0w−η⋅[2x(wx+b−y)−λ]w<0\\begin{align",{"_index":1794,"t":{"355":{"position":[[1493,93]]}}}],["w−η∂l2∂w=w−η⋅[2x(wx+b−y)+2λw]\\begin{align",{"_index":1805,"t":{"355":{"position":[[1969,45]]}}}],["w−η∂l∂w=w−η⋅[2x(wx+b−y)]\\begin{align",{"_index":1787,"t":{"355":{"position":[[1301,40]]}}}],["w−η∂l∂w\\begin{aligned}w_{\\text",{"_in
dex":1765,"t":{"355":{"position":[[741,31]]}}}],["w−η∂w∂l",{"_index":1770,"t":{"355":{"position":[[835,11]]}}}],["w−η∂w∂l1​​=w−η⋅[2x(wx+b−y)+λdwd∣w∣​]={w−η⋅[2x(wx+b−y)+λ]w−η⋅[2x(wx+b−y)−λ]​w>0w<0",{"_index":1803,"t":{"355":{"position":[[1867,86]]}}}],["w−η∂w∂l2​​=w−η⋅[2x(wx+b−y)+2λw",{"_index":1808,"t":{"355":{"position":[[2137,35]]}}}],["w−η∂w∂l​=w−η⋅[2x(wx+b−i",{"_index":1792,"t":{"355":{"position":[[1448,29]]}}}],["w∥12​≤θ(1",{"_index":1893,"t":{"378":{"position":[[142,11]]}}}],["w∥12≤θ(1)min",{"_index":1885,"t":{"378":{"position":[[22,13]]}}}],["w∥1=∣w1∣+∣w2∣+…+∣wn∣\\|\\mathbf{w}\\|_{1}=\\left|w_{1}\\right|+\\left|w_{2}\\right|+\\ldots+\\left|w_{n}\\right|∥w∥1​=∣w1​∣+∣w2​∣+…+∣wn",{"_index":1731,"t":{"351":{"position":[[70,128]]}}}],["w∥2=(∣w1∣2+∣w2∣2+…+∣wn∣2)12\\|\\mathbf{w}\\|_{2}=\\left(\\left|w_{1}\\right|^{2}+\\left|w_{2}\\right|^{2}+\\ldots+\\left|w_{n}\\right|^{2}\\right)^{\\frac{1}{2}}∥w∥2​=(∣w1​∣2+∣w2​∣2+…+∣wn​∣2)21",{"_index":1733,"t":{"351":{"position":[[249,182]]}}}],["w∥p=(∣w1∣p+∣w2∣p+…+∣wn∣p)1p\\|\\mathbf{w}\\|_{p}=\\left(\\left|w_{1}\\right|^{p}+\\left|w_{2}\\right|^{p}+\\ldots+\\left|w_{n}\\right|^{p}\\right)^{\\frac{1}{p}}∥w∥p​=(∣w1​∣p+∣w2​∣p+…+∣wn​∣p)p1",{"_index":1734,"t":{"351":{"position":[[439,182]]}}}],["w。就如公式15和16",{"_index":1827,"t":{"359":{"position":[[364,19]]}}}],["w上,从而使其较少为负。因此,这具有将w推向0",{"_index":1846,"t":{"363":{"position":[[66,31]]}}}],["w为bbox的宽高,c为该bbox是否存在object",{"_index":544,"t":{"141":{"position":[[337,30]]}}}],["w和b",{"_index":1829,"t":{"359":{"position":[[408,5]]}}}],["w归约为文法开始符号",{"_index":359,"t":{"81":{"position":[[103,31]]}}}],["w推向0如何有助于l1正则化中的过拟合?如上所述,随着w变为0,我们正在通过降低变量的重要性来减少功能的数量。在上面的方程式中,我们看到x_2,x_4和x_5",{"_index":1858,"t":{"363":{"position":[[418,91]]}}}],["w更小。相反,在等式3.2中,如果w",{"_index":1845,"t":{"363":{"position":[[39,24]]}}}],["w的符号就可以实现l1",{"_index":1842,"t":{"361":{"position":[[236,36]]}}}],["x",{"_index":6,"t":{"2":{"position":[[74,3],[328,1],[480,1],[488,1],[492,1],[616,1],[628,1],[732,1]]},"4":{"position":[[66,3],[320,1],[472,1],[480,1],[484,1],[608,1],[620,1],[724,1]]},"15":{"position":[[74,3],[328,1],[480,1],[488,1],[492,1],[616,1],[628,1],[732,1]]},"17":{"position":[[66,3],[320,1],[472,1],[480,1],[484,1],[608,1],[620,1],[724,1]]},"195":{"position":[[100,1],[124,2],[147,1],[155,1]]},"197":{"position":[[91,2],[114,1],[131,1]]},"221":{"position":[[118,3]]},"227":{"position":[[241,12],[274,10]]},"255":{"position":[[41,3],[69,5],[84,3]]},"265":{"position":[[4,2]]},"277":{"position":[[525,8],[1534,2],[1550,2]]},"279":{"position":[[290,8],[317,8]]},"320":{"position":[[43,3],[305,1]]},"322":{"position":[[42,1],[50,1],[54,1],[178,1],[190,1],[294,1]]},"355":{"position":[[847,1]]},"359":{"position":[[199,3]]},"392":{"position":[[382,3],[873,3],[1092,2],[1135,1],[1157,1],[1162,2],[1171,1],[1917,3],[1986,2]]}}}],["x(w",{"_index":1790,"t":{"355":{"position":[[1418,3],[1672,3],[1758,3],[1802,3],[2095,3]]},"357":{"position":[[65,3]]}}}],["x)%mod",{"_index":793,"t":{"255":{"position":[[115,9]]}}}],["x)=11+e−x(1)\\sigma(x",{"_index":3,"t":{"2":{"position":[[31,23]]},"4":{"position":[[23,23]]},"15":{"position":[[31,23]]},"17":{"position":[[23,23]]},"320":{"position":[[0,23]]}}}],["x)>(i",{"_index":790,"t":{"255":{"position":[[28,10]]}}}],["x+b",{"_index":1749,"t":{"355":{"position":[[182,3],[322,3],[621,3],[1422,3],[1676,3],[1762,3],[1806,3],[2099,3]]},"357":{"position":[[69,3]]}}}],["x+by^​=wx+b",{"_index":1742,"t":{"353":{"position":[[71,11]]}}}],["x.reshap",{"_index":1938,"t":{"392":{"position":[[393,10]]}}}],["x.shape[0",{"_index":2017,"t":{"
392":{"position":[[2099,11]]}}}],["x.to(devic",{"_index":1972,"t":{"392":{"position":[[1139,13],[1175,12],[1993,13]]}}}],["x86",{"_index":363,"t":{"91":{"position":[[89,12]]},"101":{"position":[[835,5]]}}}],["x86/elf",{"_index":410,"t":{"101":{"position":[[854,7]]}}}],["x86_64与i386",{"_index":395,"t":{"101":{"position":[[561,16]]}}}],["x_{1",{"_index":1849,"t":{"363":{"position":[[272,5]]}}}],["x_{1}+w_{2",{"_index":1738,"t":{"351":{"position":[[747,11]]}}}],["x_{2}+0.3251",{"_index":1851,"t":{"363":{"position":[[285,12]]}}}],["x_{2}+\\ldots+w_{n",{"_index":1739,"t":{"351":{"position":[[759,18]]}}}],["x_{3}+0.0009",{"_index":1852,"t":{"363":{"position":[[298,12]]}}}],["x_{4}+0.0001",{"_index":1853,"t":{"363":{"position":[[311,12]]}}}],["x_{5",{"_index":1854,"t":{"363":{"position":[[324,5]]}}}],["x_{6",{"_index":1856,"t":{"363":{"position":[[337,5]]}}}],["x_{n}+by^​=w1​x1​+w2​x2​+…+wn​xn​+b",{"_index":1740,"t":{"351":{"position":[[778,35]]}}}],["xi",{"_index":2051,"t":{"400":{"position":[[621,2]]}}}],["xlim=[1",{"_index":1998,"t":{"392":{"position":[[1714,8]]}}}],["xor异或等非线性问题,导致第一次ai",{"_index":2056,"t":{"402":{"position":[[43,35]]}}}],["xxx.plasmoid",{"_index":2140,"t":{"439":{"position":[[349,12]]}}}],["xxx的可能取值为x=x1,x2,...,xnx=x_1,x_2,...,x_nx=x1​,x2​,...,xn​,而取值事件xix_ixi​发生的概率为pip_ipi",{"_index":1589,"t":{"327":{"position":[[152,106]]}}}],["xxx,则在第一个任务完成后,每隔xxx",{"_index":452,"t":{"112":{"position":[[0,55]]}}}],["x为100时,sigmoid(x)就接近于0",{"_index":25,"t":{"2":{"position":[[377,24]]},"4":{"position":[[369,24]]},"15":{"position":[[377,24]]},"17":{"position":[[369,24]]},"320":{"position":[[354,24]]}}}],["x和y)。仅根据公式中的模型和数据更新权重会导致过拟合,从而导致模型泛化性不好。另一方面,在等式15,16中,w",{"_index":1830,"t":{"359":{"position":[[418,91]]}}}],["x,输出所有x",{"_index":725,"t":{"227":{"position":[[174,13]]}}}],["y",{"_index":54,"t":{"6":{"position":[[196,1]]},"19":{"position":[[196,1]]},"141":{"position":[[301,2]]},"195":{"position":[[111,1],[134,1],[138,1],[175,2]]},"197":{"position":[[101,1],[105,1]]},"255":{"position":[[47,4],[77,4],[90,4]]},"325":{"position":[[172,1]]},"355":{"position":[[1426,3],[1810,2]]},"359":{"position":[[203,144]]},"392":{"position":[[1095,1],[1188,1],[1237,3],[1317,3],[1361,2],[1921,2],[1989,1],[2051,2],[2127,3]]}}}],["y)+2",{"_index":1807,"t":{"355":{"position":[[2103,4]]}}}],["y)+\\lambda",{"_index":1797,"t":{"355":{"position":[[1680,10],[1766,11]]}}}],["y)^{2",{"_index":1748,"t":{"355":{"position":[[167,6],[186,6]]}}}],["y)^{2}+\\lambda",{"_index":1761,"t":{"355":{"position":[[625,14]]}}}],["y)^{2}+\\lambda|w|l1​=(wx+b−y)2+λ∣w",{"_index":1752,"t":{"355":{"position":[[326,35]]}}}],["y)h=2x(wx+b−i",{"_index":1811,"t":{"357":{"position":[[73,14]]}}}],["y*z",{"_index":2045,"t":{"400":{"position":[[401,4]]}}}],["y.numel",{"_index":1975,"t":{"392":{"position":[[1241,10],[2131,10]]}}}],["y.to(devic",{"_index":1973,"t":{"392":{"position":[[1192,12],[2007,12]]}}}],["y2",{"_index":2091,"t":{"426":{"position":[[192,5]]}}}],["y^=0.4561x1−0.0007x2+0.3251x3+0.0009x4+0.0001x5−0.9142x6−0.553\\hat{y}=0.4561",{"_index":1848,"t":{"363":{"position":[[195,76]]}}}],["y^=w1x1+w2x2+…+wnxn+b\\hat{y}=w_{1",{"_index":1737,"t":{"351":{"position":[[712,34]]}}}],["y^=wx+b\\hat{y}=w",{"_index":1741,"t":{"353":{"position":[[54,16]]}}}],["y_at",{"_index":1833,"t":{"359":{"position":[[782,7]]}}}],["y_hat",{"_index":2011,"t":{"392":{"position":[[2020,5]]}}}],["yay",{"_index":382,"t":{"99":{"position":[[87,3]]},"444":{"position":[[28,3]]}}}],["ye",{"_index":691,"t":{"213":{"position":[[282,6]]}}}],["yi",{"_index":2049,"t":{"400":{
"position":[[590,2],[616,2],[636,2]]}}}],["yi,j=∑a,bva,b∗xi+a,j+b=∑a=−δδ∑b=−δδva,b∗xia,j+b(4)y_{i,j}=\\sum_{a,b}{v_{a,b}*x_{i+a,j+b}}=\\sum_{a",{"_index":1560,"t":{"316":{"position":[[32,98]]}}}],["yi,j=∑a,bvi,j,a,b∗xi+a,j+b=∑a,bva,b∗xi+a,j+b(3)y_{i,j}=\\sum_{a,b}{v_{i,j,a,b}*x_{i+a,j+b}}=\\sum_{a,b}{v_{a,b}*x_{i+a,j+b",{"_index":1556,"t":{"314":{"position":[[513,122]]}}}],["yi,j=∑h,wwi,j,h,w∗xh,w(1)y_{i,j}=\\sum_{h,w}{w_{i,j,h,w}*x_{h,w",{"_index":1550,"t":{"314":{"position":[[77,64]]}}}],["yi,j=∑h,wwi,j,h,w∗xh,w=∑a,bvi,j,a,b∗xi+a,j+b(2)y_{i,j}=\\sum_{h,w}{w_{i,j,h,w}*x_{h,w}}=\\sum_{a,b}{v_{i,j,a,b}*x_{i+a,j+b",{"_index":1553,"t":{"314":{"position":[[241,122]]}}}],["yolov1",{"_index":529,"t":{"141":{"position":[[42,14],[57,14],[521,25]]}}}],["yolov2引入了anchor机制代替bbox,将图像划分为13×1313",{"_index":558,"t":{"143":{"position":[[30,37]]}}}],["yolov5使用cspnet实现特征融合,csp",{"_index":564,"t":{"145":{"position":[[9,52]]}}}],["yolo损失函数分为分类损失以及回归损失,可以在分类损失中引入foc",{"_index":583,"t":{"157":{"position":[[50,36]]}}}],["yyy是样本的真实标签,zzz",{"_index":2047,"t":{"400":{"position":[[435,22]]}}}],["y−f(x)∥2=∑[y−f(x)]22(2)\\vert",{"_index":53,"t":{"6":{"position":[[166,29]]},"19":{"position":[[166,29]]}}}],["y−f(x)∥2=∑[y−f(x)]22(6)\\vert",{"_index":1578,"t":{"325":{"position":[[142,29]]}}}],["y为bbox左上角坐标,h",{"_index":543,"t":{"141":{"position":[[322,14]]}}}],["y的大部分将由y_hat",{"_index":1831,"t":{"359":{"position":[[712,16]]}}}],["y轴数据、x轴数据,win参数是窗口的唯一标识,opt可选字典中可以给出窗口的title和legend",{"_index":2077,"t":{"424":{"position":[[49,66]]}}}],["z=w∗x+bz=w*x+bz=w∗x+b",{"_index":2042,"t":{"400":{"position":[[217,60]]}}}],["zi",{"_index":2050,"t":{"400":{"position":[[595,2]]}}}],["zip",{"_index":133,"t":{"21":{"position":[[1007,5],[1013,43]]},"23":{"position":[[997,5],[1003,43]]},"405":{"position":[[993,5],[999,39]]}}}],["zip(a",{"_index":137,"t":{"21":{"position":[[1117,6],[1311,6]]},"23":{"position":[[1107,6],[1301,6]]},"405":{"position":[[1099,6],[1293,6]]}}}],["zsh的配置文件:~/.zshrc",{"_index":2104,"t":{"432":{"position":[[37,17]]}}}],["zzz带入阈值函数,如符号函数sign(z)sign(z)sign(z",{"_index":2043,"t":{"400":{"position":[[278,56]]}}}]],"pipeline":["stemmer"]}}] \ No newline at end of file diff --git a/search/index.html b/search/index.html index 876ed41ae..16fe5de86 100644 --- a/search/index.html +++ b/search/index.html @@ -9,7 +9,7 @@ - +