author     CoprDistGit <infra@openeuler.org>   2024-03-30 11:18:18 +0000
committer  CoprDistGit <infra@openeuler.org>   2024-03-30 11:18:18 +0000
commit     cd18448a3778e10ebd2fe6fa8dae6707b446f006 (patch)
tree       45eaac3ae31b13deb6d6547381cf8d29b4ef2a43
parent     ac6810e683908ce795e047e2ed7e2f985b5285a0 (diff)

automatic import of transformers (openeuler22.03_LTS_SP2)
-rw-r--r--  .gitignore            1
-rw-r--r--  sources               1
-rw-r--r--  transformers.git   2241
-rw-r--r--  transformers.spec     6
4 files changed, 5 insertions, 2244 deletions
diff --git a/.gitignore b/.gitignore
index e69de29..f194d34 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/v4.39.0.tar.gz
diff --git a/sources b/sources
index e69de29..bed34a9 100644
--- a/sources
+++ b/sources
@@ -0,0 +1 @@
+ee67a15fa896751c244882aefd446a8d v4.39.0.tar.gz
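
The added sources entry uses the dist-git "MD5SUM  filename" convention. As a minimal sketch only (the helper function and the assumption that v4.39.0.tar.gz sits in the current directory are illustrative, not part of this commit), such an entry can be checked against the downloaded tarball in Python:

    import hashlib

    # Verify a dist-git "MD5SUM filename" entry against a local file.
    # The entry string passed below is taken from the sources hunk above;
    # the local path is an assumption made for illustration.
    def verify_sources_entry(entry: str, directory: str = ".") -> bool:
        expected_md5, filename = entry.split()
        digest = hashlib.md5()
        with open(f"{directory}/{filename}", "rb") as fh:
            for chunk in iter(lambda: fh.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest() == expected_md5

    print(verify_sources_entry("ee67a15fa896751c244882aefd446a8d v4.39.0.tar.gz"))
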
diff --git a/transformers.git b/transformers.git
deleted file mode 100644
index d5039af..0000000
--- a/transformers.git
+++ /dev/null
@@ -1,2241 +0,0 @@
[The 2,241 deleted lines are not reproduced here. transformers.git contained a saved copy of the rendered GitHub page for huggingface/transformers ("🤗 Transformers: State-of-the-art Machine Learning for Pytorch, TensorFlow, and JAX"): raw page HTML consisting of stylesheets, scripts, header navigation, and icon markup rather than package sources.]
- <path d="M2.343 13.657A8 8 0 1 1 13.658 2.343 8 8 0 0 1 2.343 13.657ZM6.03 4.97a.751.751 0 0 0-1.042.018.751.751 0 0 0-.018 1.042L6.94 8 4.97 9.97a.749.749 0 0 0 .326 1.275.749.749 0 0 0 .734-.215L8 9.06l1.97 1.97a.749.749 0 0 0 1.275-.326.749.749 0 0 0-.215-.734L9.06 8l1.97-1.97a.749.749 0 0 0-.326-1.275.749.749 0 0 0-.734.215L8 6.94Z"></path>
-</svg>
-</button>
-
- </div>
- <template id="search-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-search">
- <path d="M10.68 11.74a6 6 0 0 1-7.922-8.982 6 6 0 0 1 8.982 7.922l3.04 3.04a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215ZM11.5 7a4.499 4.499 0 1 0-8.997 0A4.499 4.499 0 0 0 11.5 7Z"></path>
-</svg>
-</template>
-
-<template id="code-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code">
- <path d="m11.28 3.22 4.25 4.25a.75.75 0 0 1 0 1.06l-4.25 4.25a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L13.94 8l-3.72-3.72a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215Zm-6.56 0a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042L2.06 8l3.72 3.72a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L.47 8.53a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
-</template>
-
-<template id="file-code-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-file-code">
- <path d="M4 1.75C4 .784 4.784 0 5.75 0h5.586c.464 0 .909.184 1.237.513l2.914 2.914c.329.328.513.773.513 1.237v8.586A1.75 1.75 0 0 1 14.25 15h-9a.75.75 0 0 1 0-1.5h9a.25.25 0 0 0 .25-.25V6h-2.75A1.75 1.75 0 0 1 10 4.25V1.5H5.75a.25.25 0 0 0-.25.25v2.5a.75.75 0 0 1-1.5 0Zm1.72 4.97a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734l1.47-1.47-1.47-1.47a.75.75 0 0 1 0-1.06ZM3.28 7.78 1.81 9.25l1.47 1.47a.751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018l-2-2a.75.75 0 0 1 0-1.06l2-2a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Zm8.22-6.218V4.25c0 .138.112.25.25.25h2.688l-.011-.013-2.914-2.914-.013-.011Z"></path>
-</svg>
-</template>
-
-<template id="history-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-history">
- <path d="m.427 1.927 1.215 1.215a8.002 8.002 0 1 1-1.6 5.685.75.75 0 1 1 1.493-.154 6.5 6.5 0 1 0 1.18-4.458l1.358 1.358A.25.25 0 0 1 3.896 6H.25A.25.25 0 0 1 0 5.75V2.104a.25.25 0 0 1 .427-.177ZM7.75 4a.75.75 0 0 1 .75.75v2.992l2.028.812a.75.75 0 0 1-.557 1.392l-2.5-1A.751.751 0 0 1 7 8.25v-3.5A.75.75 0 0 1 7.75 4Z"></path>
-</svg>
-</template>
-
-<template id="repo-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo">
- <path d="M2 2.5A2.5 2.5 0 0 1 4.5 0h8.75a.75.75 0 0 1 .75.75v12.5a.75.75 0 0 1-.75.75h-2.5a.75.75 0 0 1 0-1.5h1.75v-2h-8a1 1 0 0 0-.714 1.7.75.75 0 1 1-1.072 1.05A2.495 2.495 0 0 1 2 11.5Zm10.5-1h-8a1 1 0 0 0-1 1v6.708A2.486 2.486 0 0 1 4.5 9h8ZM5 12.25a.25.25 0 0 1 .25-.25h3.5a.25.25 0 0 1 .25.25v3.25a.25.25 0 0 1-.4.2l-1.45-1.087a.249.249 0 0 0-.3 0L5.4 15.7a.25.25 0 0 1-.4-.2Z"></path>
-</svg>
-</template>
-
-<template id="bookmark-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-bookmark">
- <path d="M3 2.75C3 1.784 3.784 1 4.75 1h6.5c.966 0 1.75.784 1.75 1.75v11.5a.75.75 0 0 1-1.227.579L8 11.722l-3.773 3.107A.751.751 0 0 1 3 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v9.91l3.023-2.489a.75.75 0 0 1 .954 0l3.023 2.49V2.75a.25.25 0 0 0-.25-.25Z"></path>
-</svg>
-</template>
-
-<template id="plus-circle-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-plus-circle">
- <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm7.25-3.25v2.5h2.5a.75.75 0 0 1 0 1.5h-2.5v2.5a.75.75 0 0 1-1.5 0v-2.5h-2.5a.75.75 0 0 1 0-1.5h2.5v-2.5a.75.75 0 0 1 1.5 0Z"></path>
-</svg>
-</template>
-
-<template id="circle-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
-</template>
-
-<template id="trash-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-trash">
- <path d="M11 1.75V3h2.25a.75.75 0 0 1 0 1.5H2.75a.75.75 0 0 1 0-1.5H5V1.75C5 .784 5.784 0 6.75 0h2.5C10.216 0 11 .784 11 1.75ZM4.496 6.675l.66 6.6a.25.25 0 0 0 .249.225h5.19a.25.25 0 0 0 .249-.225l.66-6.6a.75.75 0 0 1 1.492.149l-.66 6.6A1.748 1.748 0 0 1 10.595 15h-5.19a1.75 1.75 0 0 1-1.741-1.575l-.66-6.6a.75.75 0 1 1 1.492-.15ZM6.5 1.75V3h3V1.75a.25.25 0 0 0-.25-.25h-2.5a.25.25 0 0 0-.25.25Z"></path>
-</svg>
-</template>
-
-<template id="team-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-people">
- <path d="M2 5.5a3.5 3.5 0 1 1 5.898 2.549 5.508 5.508 0 0 1 3.034 4.084.75.75 0 1 1-1.482.235 4 4 0 0 0-7.9 0 .75.75 0 0 1-1.482-.236A5.507 5.507 0 0 1 3.102 8.05 3.493 3.493 0 0 1 2 5.5ZM11 4a3.001 3.001 0 0 1 2.22 5.018 5.01 5.01 0 0 1 2.56 3.012.749.749 0 0 1-.885.954.752.752 0 0 1-.549-.514 3.507 3.507 0 0 0-2.522-2.372.75.75 0 0 1-.574-.73v-.352a.75.75 0 0 1 .416-.672A1.5 1.5 0 0 0 11 5.5.75.75 0 0 1 11 4Zm-5.5-.5a2 2 0 1 0-.001 3.999A2 2 0 0 0 5.5 3.5Z"></path>
-</svg>
-</template>
-
-<template id="project-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-project">
- <path d="M1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25V1.75C0 .784.784 0 1.75 0ZM1.5 1.75v12.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25ZM11.75 3a.75.75 0 0 1 .75.75v7.5a.75.75 0 0 1-1.5 0v-7.5a.75.75 0 0 1 .75-.75Zm-8.25.75a.75.75 0 0 1 1.5 0v5.5a.75.75 0 0 1-1.5 0ZM8 3a.75.75 0 0 1 .75.75v3.5a.75.75 0 0 1-1.5 0v-3.5A.75.75 0 0 1 8 3Z"></path>
-</svg>
-</template>
-
-<template id="pencil-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pencil">
- <path d="M11.013 1.427a1.75 1.75 0 0 1 2.474 0l1.086 1.086a1.75 1.75 0 0 1 0 2.474l-8.61 8.61c-.21.21-.47.364-.756.445l-3.251.93a.75.75 0 0 1-.927-.928l.929-3.25c.081-.286.235-.547.445-.758l8.61-8.61Zm.176 4.823L9.75 4.81l-6.286 6.287a.253.253 0 0 0-.064.108l-.558 1.953 1.953-.558a.253.253 0 0 0 .108-.064Zm1.238-3.763a.25.25 0 0 0-.354 0L10.811 3.75l1.439 1.44 1.263-1.263a.25.25 0 0 0 0-.354Z"></path>
-</svg>
-</template>
-
-<template id="copilot-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copilot">
- <path d="M7.998 15.035c-4.562 0-7.873-2.914-7.998-3.749V9.338c.085-.628.677-1.686 1.588-2.065.013-.07.024-.143.036-.218.029-.183.06-.384.126-.612-.201-.508-.254-1.084-.254-1.656 0-.87.128-1.769.693-2.484.579-.733 1.494-1.124 2.724-1.261 1.206-.134 2.262.034 2.944.765.05.053.096.108.139.165.044-.057.094-.112.143-.165.682-.731 1.738-.899 2.944-.765 1.23.137 2.145.528 2.724 1.261.566.715.693 1.614.693 2.484 0 .572-.053 1.148-.254 1.656.066.228.098.429.126.612.012.076.024.148.037.218.924.385 1.522 1.471 1.591 2.095v1.872c0 .766-3.351 3.795-8.002 3.795Zm0-1.485c2.28 0 4.584-1.11 5.002-1.433V7.862l-.023-.116c-.49.21-1.075.291-1.727.291-1.146 0-2.059-.327-2.71-.991A3.222 3.222 0 0 1 8 6.303a3.24 3.24 0 0 1-.544.743c-.65.664-1.563.991-2.71.991-.652 0-1.236-.081-1.727-.291l-.023.116v4.255c.419.323 2.722 1.433 5.002 1.433ZM6.762 2.83c-.193-.206-.637-.413-1.682-.297-1.019.113-1.479.404-1.713.7-.247.312-.369.789-.369 1.554 0 .793.129 1.171.308 1.371.162.181.519.379 1.442.379.853 0 1.339-.235 1.638-.54.315-.322.527-.827.617-1.553.117-.935-.037-1.395-.241-1.614Zm4.155-.297c-1.044-.116-1.488.091-1.681.297-.204.219-.359.679-.242 1.614.091.726.303 1.231.618 1.553.299.305.784.54 1.638.54.922 0 1.28-.198 1.442-.379.179-.2.308-.578.308-1.371 0-.765-.123-1.242-.37-1.554-.233-.296-.693-.587-1.713-.7Z"></path><path d="M6.25 9.037a.75.75 0 0 1 .75.75v1.501a.75.75 0 0 1-1.5 0V9.787a.75.75 0 0 1 .75-.75Zm4.25.75v1.501a.75.75 0 0 1-1.5 0V9.787a.75.75 0 0 1 1.5 0Z"></path>
-</svg>
-</template>
-
-<template id="workflow-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-workflow">
- <path d="M0 1.75C0 .784.784 0 1.75 0h3.5C6.216 0 7 .784 7 1.75v3.5A1.75 1.75 0 0 1 5.25 7H4v4a1 1 0 0 0 1 1h4v-1.25C9 9.784 9.784 9 10.75 9h3.5c.966 0 1.75.784 1.75 1.75v3.5A1.75 1.75 0 0 1 14.25 16h-3.5A1.75 1.75 0 0 1 9 14.25v-.75H5A2.5 2.5 0 0 1 2.5 11V7h-.75A1.75 1.75 0 0 1 0 5.25Zm1.75-.25a.25.25 0 0 0-.25.25v3.5c0 .138.112.25.25.25h3.5a.25.25 0 0 0 .25-.25v-3.5a.25.25 0 0 0-.25-.25Zm9 9a.25.25 0 0 0-.25.25v3.5c0 .138.112.25.25.25h3.5a.25.25 0 0 0 .25-.25v-3.5a.25.25 0 0 0-.25-.25Z"></path>
-</svg>
-</template>
-
-<template id="book-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book">
- <path d="M0 1.75A.75.75 0 0 1 .75 1h4.253c1.227 0 2.317.59 3 1.501A3.743 3.743 0 0 1 11.006 1h4.245a.75.75 0 0 1 .75.75v10.5a.75.75 0 0 1-.75.75h-4.507a2.25 2.25 0 0 0-1.591.659l-.622.621a.75.75 0 0 1-1.06 0l-.622-.621A2.25 2.25 0 0 0 5.258 13H.75a.75.75 0 0 1-.75-.75Zm7.251 10.324.004-5.073-.002-2.253A2.25 2.25 0 0 0 5.003 2.5H1.5v9h3.757a3.75 3.75 0 0 1 1.994.574ZM8.755 4.75l-.004 7.322a3.752 3.752 0 0 1 1.992-.572H14.5v-9h-3.495a2.25 2.25 0 0 0-2.25 2.25Z"></path>
-</svg>
-</template>
-
-<template id="code-review-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code-review">
- <path d="M1.75 1h12.5c.966 0 1.75.784 1.75 1.75v8.5A1.75 1.75 0 0 1 14.25 13H8.061l-2.574 2.573A1.458 1.458 0 0 1 3 14.543V13H1.75A1.75 1.75 0 0 1 0 11.25v-8.5C0 1.784.784 1 1.75 1ZM1.5 2.75v8.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h6.5a.25.25 0 0 0 .25-.25v-8.5a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25Zm5.28 1.72a.75.75 0 0 1 0 1.06L5.31 7l1.47 1.47a.751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018l-2-2a.75.75 0 0 1 0-1.06l2-2a.75.75 0 0 1 1.06 0Zm2.44 0a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L10.69 7 9.22 5.53a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
-</template>
-
-<template id="codespaces-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-codespaces">
- <path d="M0 11.25c0-.966.784-1.75 1.75-1.75h12.5c.966 0 1.75.784 1.75 1.75v3A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25Zm2-9.5C2 .784 2.784 0 3.75 0h8.5C13.216 0 14 .784 14 1.75v5a1.75 1.75 0 0 1-1.75 1.75h-8.5A1.75 1.75 0 0 1 2 6.75Zm1.75-.25a.25.25 0 0 0-.25.25v5c0 .138.112.25.25.25h8.5a.25.25 0 0 0 .25-.25v-5a.25.25 0 0 0-.25-.25Zm-2 9.5a.25.25 0 0 0-.25.25v3c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-3a.25.25 0 0 0-.25-.25Z"></path><path d="M7 12.75a.75.75 0 0 1 .75-.75h4.5a.75.75 0 0 1 0 1.5h-4.5a.75.75 0 0 1-.75-.75Zm-4 0a.75.75 0 0 1 .75-.75h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1-.75-.75Z"></path>
-</svg>
-</template>
-
-<template id="comment-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-comment">
- <path d="M1 2.75C1 1.784 1.784 1 2.75 1h10.5c.966 0 1.75.784 1.75 1.75v7.5A1.75 1.75 0 0 1 13.25 12H9.06l-2.573 2.573A1.458 1.458 0 0 1 4 13.543V12H2.75A1.75 1.75 0 0 1 1 10.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h2a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h4.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path>
-</svg>
-</template>
-
-<template id="comment-discussion-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-comment-discussion">
- <path d="M1.75 1h8.5c.966 0 1.75.784 1.75 1.75v5.5A1.75 1.75 0 0 1 10.25 10H7.061l-2.574 2.573A1.458 1.458 0 0 1 2 11.543V10h-.25A1.75 1.75 0 0 1 0 8.25v-5.5C0 1.784.784 1 1.75 1ZM1.5 2.75v5.5c0 .138.112.25.25.25h1a.75.75 0 0 1 .75.75v2.19l2.72-2.72a.749.749 0 0 1 .53-.22h3.5a.25.25 0 0 0 .25-.25v-5.5a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25Zm13 2a.25.25 0 0 0-.25-.25h-.5a.75.75 0 0 1 0-1.5h.5c.966 0 1.75.784 1.75 1.75v5.5A1.75 1.75 0 0 1 14.25 12H14v1.543a1.458 1.458 0 0 1-2.487 1.03L9.22 12.28a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215l2.22 2.22v-2.19a.75.75 0 0 1 .75-.75h1a.25.25 0 0 0 .25-.25Z"></path>
-</svg>
-</template>
-
-<template id="organization-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-organization">
- <path d="M1.75 16A1.75 1.75 0 0 1 0 14.25V1.75C0 .784.784 0 1.75 0h8.5C11.216 0 12 .784 12 1.75v12.5c0 .085-.006.168-.018.25h2.268a.25.25 0 0 0 .25-.25V8.285a.25.25 0 0 0-.111-.208l-1.055-.703a.749.749 0 1 1 .832-1.248l1.055.703c.487.325.779.871.779 1.456v5.965A1.75 1.75 0 0 1 14.25 16h-3.5a.766.766 0 0 1-.197-.026c-.099.017-.2.026-.303.026h-3a.75.75 0 0 1-.75-.75V14h-1v1.25a.75.75 0 0 1-.75.75Zm-.25-1.75c0 .138.112.25.25.25H4v-1.25a.75.75 0 0 1 .75-.75h2.5a.75.75 0 0 1 .75.75v1.25h2.25a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25ZM3.75 6h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 3.75A.75.75 0 0 1 3.75 3h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 3.75Zm4 3A.75.75 0 0 1 7.75 6h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 7 6.75ZM7.75 3h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5ZM3 9.75A.75.75 0 0 1 3.75 9h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 9.75ZM7.75 9h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5Z"></path>
-</svg>
-</template>
-
-<template id="rocket-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-rocket">
- <path d="M14.064 0h.186C15.216 0 16 .784 16 1.75v.186a8.752 8.752 0 0 1-2.564 6.186l-.458.459c-.314.314-.641.616-.979.904v3.207c0 .608-.315 1.172-.833 1.49l-2.774 1.707a.749.749 0 0 1-1.11-.418l-.954-3.102a1.214 1.214 0 0 1-.145-.125L3.754 9.816a1.218 1.218 0 0 1-.124-.145L.528 8.717a.749.749 0 0 1-.418-1.11l1.71-2.774A1.748 1.748 0 0 1 3.31 4h3.204c.288-.338.59-.665.904-.979l.459-.458A8.749 8.749 0 0 1 14.064 0ZM8.938 3.623h-.002l-.458.458c-.76.76-1.437 1.598-2.02 2.5l-1.5 2.317 2.143 2.143 2.317-1.5c.902-.583 1.74-1.26 2.499-2.02l.459-.458a7.25 7.25 0 0 0 2.123-5.127V1.75a.25.25 0 0 0-.25-.25h-.186a7.249 7.249 0 0 0-5.125 2.123ZM3.56 14.56c-.732.732-2.334 1.045-3.005 1.148a.234.234 0 0 1-.201-.064.234.234 0 0 1-.064-.201c.103-.671.416-2.273 1.15-3.003a1.502 1.502 0 1 1 2.12 2.12Zm6.94-3.935c-.088.06-.177.118-.266.175l-2.35 1.521.548 1.783 1.949-1.2a.25.25 0 0 0 .119-.213ZM3.678 8.116 5.2 5.766c.058-.09.117-.178.176-.266H3.309a.25.25 0 0 0-.213.119l-1.2 1.95ZM12 5a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path>
-</svg>
-</template>
-
-<template id="shield-check-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield-check">
- <path d="m8.533.133 5.25 1.68A1.75 1.75 0 0 1 15 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.697 1.697 0 0 1-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 0 1 1.217-1.667l5.25-1.68a1.748 1.748 0 0 1 1.066 0Zm-.61 1.429.001.001-5.25 1.68a.251.251 0 0 0-.174.237V7c0 1.36.275 2.666 1.057 3.859.784 1.194 2.121 2.342 4.366 3.298a.196.196 0 0 0 .154 0c2.245-.957 3.582-2.103 4.366-3.297C13.225 9.666 13.5 8.358 13.5 7V3.48a.25.25 0 0 0-.174-.238l-5.25-1.68a.25.25 0 0 0-.153 0ZM11.28 6.28l-3.5 3.5a.75.75 0 0 1-1.06 0l-1.5-1.5a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215l.97.97 2.97-2.97a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path>
-</svg>
-</template>
-
-<template id="heart-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-heart">
- <path d="m8 14.25.345.666a.75.75 0 0 1-.69 0l-.008-.004-.018-.01a7.152 7.152 0 0 1-.31-.17 22.055 22.055 0 0 1-3.434-2.414C2.045 10.731 0 8.35 0 5.5 0 2.836 2.086 1 4.25 1 5.797 1 7.153 1.802 8 3.02 8.847 1.802 10.203 1 11.75 1 13.914 1 16 2.836 16 5.5c0 2.85-2.045 5.231-3.885 6.818a22.066 22.066 0 0 1-3.744 2.584l-.018.01-.006.003h-.002ZM4.25 2.5c-1.336 0-2.75 1.164-2.75 3 0 2.15 1.58 4.144 3.365 5.682A20.58 20.58 0 0 0 8 13.393a20.58 20.58 0 0 0 3.135-2.211C12.92 9.644 14.5 7.65 14.5 5.5c0-1.836-1.414-3-2.75-3-1.373 0-2.609.986-3.029 2.456a.749.749 0 0 1-1.442 0C6.859 3.486 5.623 2.5 4.25 2.5Z"></path>
-</svg>
-</template>
-
-<template id="server-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-server">
- <path d="M1.75 1h12.5c.966 0 1.75.784 1.75 1.75v4c0 .372-.116.717-.314 1 .198.283.314.628.314 1v4a1.75 1.75 0 0 1-1.75 1.75H1.75A1.75 1.75 0 0 1 0 12.75v-4c0-.358.109-.707.314-1a1.739 1.739 0 0 1-.314-1v-4C0 1.784.784 1 1.75 1ZM1.5 2.75v4c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-4a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25Zm.25 5.75a.25.25 0 0 0-.25.25v4c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-4a.25.25 0 0 0-.25-.25ZM7 4.75A.75.75 0 0 1 7.75 4h4.5a.75.75 0 0 1 0 1.5h-4.5A.75.75 0 0 1 7 4.75ZM7.75 10h4.5a.75.75 0 0 1 0 1.5h-4.5a.75.75 0 0 1 0-1.5ZM3 4.75A.75.75 0 0 1 3.75 4h.5a.75.75 0 0 1 0 1.5h-.5A.75.75 0 0 1 3 4.75ZM3.75 10h.5a.75.75 0 0 1 0 1.5h-.5a.75.75 0 0 1 0-1.5Z"></path>
-</svg>
-</template>
-
-<template id="globe-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-globe">
- <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM5.78 8.75a9.64 9.64 0 0 0 1.363 4.177c.255.426.542.832.857 1.215.245-.296.551-.705.857-1.215A9.64 9.64 0 0 0 10.22 8.75Zm4.44-1.5a9.64 9.64 0 0 0-1.363-4.177c-.307-.51-.612-.919-.857-1.215a9.927 9.927 0 0 0-.857 1.215A9.64 9.64 0 0 0 5.78 7.25Zm-5.944 1.5H1.543a6.507 6.507 0 0 0 4.666 5.5c-.123-.181-.24-.365-.352-.552-.715-1.192-1.437-2.874-1.581-4.948Zm-2.733-1.5h2.733c.144-2.074.866-3.756 1.58-4.948.12-.197.237-.381.353-.552a6.507 6.507 0 0 0-4.666 5.5Zm10.181 1.5c-.144 2.074-.866 3.756-1.58 4.948-.12.197-.237.381-.353.552a6.507 6.507 0 0 0 4.666-5.5Zm2.733-1.5a6.507 6.507 0 0 0-4.666-5.5c.123.181.24.365.353.552.714 1.192 1.436 2.874 1.58 4.948Z"></path>
-</svg>
-</template>
-
-<template id="issue-opened-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened">
- <path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path><path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Z"></path>
-</svg>
-</template>
-
-<template id="device-mobile-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-mobile">
- <path d="M3.75 0h8.5C13.216 0 14 .784 14 1.75v12.5A1.75 1.75 0 0 1 12.25 16h-8.5A1.75 1.75 0 0 1 2 14.25V1.75C2 .784 2.784 0 3.75 0ZM3.5 1.75v12.5c0 .138.112.25.25.25h8.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25h-8.5a.25.25 0 0 0-.25.25ZM8 13a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"></path>
-</svg>
-</template>
-
-<template id="package-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-package">
- <path d="m8.878.392 5.25 3.045c.54.314.872.89.872 1.514v6.098a1.75 1.75 0 0 1-.872 1.514l-5.25 3.045a1.75 1.75 0 0 1-1.756 0l-5.25-3.045A1.75 1.75 0 0 1 1 11.049V4.951c0-.624.332-1.201.872-1.514L7.122.392a1.75 1.75 0 0 1 1.756 0ZM7.875 1.69l-4.63 2.685L8 7.133l4.755-2.758-4.63-2.685a.248.248 0 0 0-.25 0ZM2.5 5.677v5.372c0 .09.047.171.125.216l4.625 2.683V8.432Zm6.25 8.271 4.625-2.683a.25.25 0 0 0 .125-.216V5.677L8.75 8.432Z"></path>
-</svg>
-</template>
-
-<template id="credit-card-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-credit-card">
- <path d="M10.75 9a.75.75 0 0 0 0 1.5h1.5a.75.75 0 0 0 0-1.5h-1.5Z"></path><path d="M0 3.75C0 2.784.784 2 1.75 2h12.5c.966 0 1.75.784 1.75 1.75v8.5A1.75 1.75 0 0 1 14.25 14H1.75A1.75 1.75 0 0 1 0 12.25ZM14.5 6.5h-13v5.75c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25Zm0-2.75a.25.25 0 0 0-.25-.25H1.75a.25.25 0 0 0-.25.25V5h13Z"></path>
-</svg>
-</template>
-
-<template id="play-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play">
- <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm4.879-2.773 4.264 2.559a.25.25 0 0 1 0 .428l-4.264 2.559A.25.25 0 0 1 6 10.559V5.442a.25.25 0 0 1 .379-.215Z"></path>
-</svg>
-</template>
-
-<template id="gift-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-gift">
- <path d="M2 2.75A2.75 2.75 0 0 1 4.75 0c.983 0 1.873.42 2.57 1.232.268.318.497.668.68 1.042.183-.375.411-.725.68-1.044C9.376.42 10.266 0 11.25 0a2.75 2.75 0 0 1 2.45 4h.55c.966 0 1.75.784 1.75 1.75v2c0 .698-.409 1.301-1 1.582v4.918A1.75 1.75 0 0 1 13.25 16H2.75A1.75 1.75 0 0 1 1 14.25V9.332C.409 9.05 0 8.448 0 7.75v-2C0 4.784.784 4 1.75 4h.55c-.192-.375-.3-.8-.3-1.25ZM7.25 9.5H2.5v4.75c0 .138.112.25.25.25h4.5Zm1.5 0v5h4.5a.25.25 0 0 0 .25-.25V9.5Zm0-4V8h5.5a.25.25 0 0 0 .25-.25v-2a.25.25 0 0 0-.25-.25Zm-7 0a.25.25 0 0 0-.25.25v2c0 .138.112.25.25.25h5.5V5.5h-5.5Zm3-4a1.25 1.25 0 0 0 0 2.5h2.309c-.233-.818-.542-1.401-.878-1.793-.43-.502-.915-.707-1.431-.707ZM8.941 4h2.309a1.25 1.25 0 0 0 0-2.5c-.516 0-1 .205-1.43.707-.337.392-.646.975-.879 1.793Z"></path>
-</svg>
-</template>
-
-<template id="code-square-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code-square">
- <path d="M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25Zm1.75-.25a.25.25 0 0 0-.25.25v12.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25V1.75a.25.25 0 0 0-.25-.25Zm7.47 3.97a.75.75 0 0 1 1.06 0l2 2a.75.75 0 0 1 0 1.06l-2 2a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L10.69 8 9.22 6.53a.75.75 0 0 1 0-1.06ZM6.78 6.53 5.31 8l1.47 1.47a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215l-2-2a.75.75 0 0 1 0-1.06l2-2a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path>
-</svg>
-</template>
-
-<template id="device-desktop-icon">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-device-desktop">
- <path d="M14.25 1c.966 0 1.75.784 1.75 1.75v7.5A1.75 1.75 0 0 1 14.25 12h-3.727c.099 1.041.52 1.872 1.292 2.757A.752.752 0 0 1 11.25 16h-6.5a.75.75 0 0 1-.565-1.243c.772-.885 1.192-1.716 1.292-2.757H1.75A1.75 1.75 0 0 1 0 10.25v-7.5C0 1.784.784 1 1.75 1ZM1.75 2.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25ZM9.018 12H6.982a5.72 5.72 0 0 1-.765 2.5h3.566a5.72 5.72 0 0 1-.765-2.5Z"></path>
-</svg>
-</template>
-
- <div class="position-relative">
- <ul
- role="listbox"
- class="ActionListWrap QueryBuilder-ListWrap"
- aria-label="Suggestions"
- data-action="
- combobox-commit:query-builder#comboboxCommit
- mousedown:query-builder#resultsMousedown
- "
- data-target="query-builder.resultsList"
- data-persist-list=false
- id="query-builder-test-results"
- ></ul>
- </div>
- <div class="FormControl-inlineValidation" id="validation-53a4ff2b-98c2-4da8-91c1-13ad5c19ae70" hidden="hidden">
- <span class="FormControl-inlineValidation--visual">
- <svg aria-hidden="true" height="12" viewBox="0 0 12 12" version="1.1" width="12" data-view-component="true" class="octicon octicon-alert-fill">
- <path d="M4.855.708c.5-.896 1.79-.896 2.29 0l4.675 8.351a1.312 1.312 0 0 1-1.146 1.954H1.33A1.313 1.313 0 0 1 .183 9.058ZM7 7V3H5v4Zm-1 3a1 1 0 1 0 0-2 1 1 0 0 0 0 2Z"></path>
-</svg>
- </span>
- <span></span>
-</div> </div>
- <div data-target="query-builder.screenReaderFeedback" aria-live="polite" aria-atomic="true" class="sr-only"></div>
-</query-builder></form>
- <div class="d-flex flex-row color-fg-muted px-3 text-small color-bg-default search-feedback-prompt">
- <a target="_blank" href="https://docs.github.com/search-github/github-code-search/understanding-github-code-search-syntax" data-view-component="true" class="Link color-fg-accent text-normal ml-2">
- Search syntax tips
-</a> <div class="d-flex flex-1"></div>
- </div>
- </div>
-</div>
-
- </div>
-</modal-dialog></div>
- </div>
- <div data-action="click:qbsearch-input#retract" class="dark-backdrop position-fixed" hidden data-target="qbsearch-input.darkBackdrop"></div>
- <div class="color-fg-default">
-
-<dialog-helper>
- <dialog data-target="qbsearch-input.feedbackDialog" data-action="close:qbsearch-input#handleDialogClose cancel:qbsearch-input#handleDialogClose" id="feedback-dialog" aria-modal="true" aria-labelledby="feedback-dialog-title" aria-describedby="feedback-dialog-description" data-view-component="true" class="Overlay Overlay-whenNarrow Overlay--size-medium Overlay--motion-scaleFade">
- <div data-view-component="true" class="Overlay-header">
- <div class="Overlay-headerContentWrap">
- <div class="Overlay-titleWrap">
- <h1 class="Overlay-title " id="feedback-dialog-title">
- Provide feedback
- </h1>
- </div>
- <div class="Overlay-actionWrap">
- <button data-close-dialog-id="feedback-dialog" aria-label="Close" type="button" data-view-component="true" class="close-button Overlay-closeButton"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x">
- <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path>
-</svg></button>
- </div>
- </div>
-</div>
- <scrollable-region data-labelled-by="feedback-dialog-title">
- <div data-view-component="true" class="Overlay-body"> <!-- '"` --><!-- </textarea></xmp> --></option></form><form id="code-search-feedback-form" data-turbo="false" action="/search/feedback" accept-charset="UTF-8" method="post"><input type="hidden" data-csrf="true" name="authenticity_token" value="7btyZ+aZWPN4PfgbDw4GOaBL1cxzbN2a7lZCBEOgsZDj+6GHgN8TCaS6xLd+zRZ+lYEbQx3B36JJNXc7dxM9kQ==" />
- <p>We read every piece of feedback, and take your input very seriously.</p>
- <textarea name="feedback" class="form-control width-full mb-2" style="height: 120px" id="feedback"></textarea>
- <input name="include_email" id="include_email" aria-label="Include my email address so I can be contacted" class="form-control mr-2" type="checkbox">
- <label for="include_email" style="font-weight: normal">Include my email address so I can be contacted</label>
-</form></div>
- </scrollable-region>
- <div data-view-component="true" class="Overlay-footer Overlay-footer--alignEnd"> <button data-close-dialog-id="feedback-dialog" type="button" data-view-component="true" class="btn"> Cancel
-</button>
- <button form="code-search-feedback-form" data-action="click:qbsearch-input#submitFeedback" type="submit" data-view-component="true" class="btn-primary btn"> Submit feedback
-</button>
-</div>
-</dialog></dialog-helper>
-
- <custom-scopes data-target="qbsearch-input.customScopesManager">
-
-<dialog-helper>
- <dialog data-target="custom-scopes.customScopesModalDialog" data-action="close:qbsearch-input#handleDialogClose cancel:qbsearch-input#handleDialogClose" id="custom-scopes-dialog" aria-modal="true" aria-labelledby="custom-scopes-dialog-title" aria-describedby="custom-scopes-dialog-description" data-view-component="true" class="Overlay Overlay-whenNarrow Overlay--size-medium Overlay--motion-scaleFade">
- <div data-view-component="true" class="Overlay-header Overlay-header--divided">
- <div class="Overlay-headerContentWrap">
- <div class="Overlay-titleWrap">
- <h1 class="Overlay-title " id="custom-scopes-dialog-title">
- Saved searches
- </h1>
- <h2 id="custom-scopes-dialog-description" class="Overlay-description">Use saved searches to filter your results more quickly</h2>
- </div>
- <div class="Overlay-actionWrap">
- <button data-close-dialog-id="custom-scopes-dialog" aria-label="Close" type="button" data-view-component="true" class="close-button Overlay-closeButton"><svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x">
- <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path>
-</svg></button>
- </div>
- </div>
-</div>
- <scrollable-region data-labelled-by="custom-scopes-dialog-title">
- <div data-view-component="true" class="Overlay-body"> <div data-target="custom-scopes.customScopesModalDialogFlash"></div>
-
- <div hidden class="create-custom-scope-form" data-target="custom-scopes.createCustomScopeForm">
- <!-- '"` --><!-- </textarea></xmp> --></option></form><form id="custom-scopes-dialog-form" data-turbo="false" action="/search/custom_scopes" accept-charset="UTF-8" method="post"><input type="hidden" data-csrf="true" name="authenticity_token" value="cKYcZYI+yIXRHzsryEY6B5F0ZvvSORfrAHDS1DjBuqcB/iQIaA7IWs8JZLYdUmGnAzsLIXpVP4lSdIVZ0PubAA==" />
- <div data-target="custom-scopes.customScopesModalDialogFlash"></div>
-
- <input type="hidden" id="custom_scope_id" name="custom_scope_id" data-target="custom-scopes.customScopesIdField">
-
- <div class="form-group">
- <label for="custom_scope_name">Name</label>
- <auto-check src="/search/custom_scopes/check_name" required>
- <input
- type="text"
- name="custom_scope_name"
- id="custom_scope_name"
- data-target="custom-scopes.customScopesNameField"
- class="form-control"
- autocomplete="off"
- placeholder="github-ruby"
- required
- maxlength="50">
- <input type="hidden" data-csrf="true" value="6ZlvAePsEK6L2qzYRdZDUKTihU/qS0RmwvAcX27I+CswZhwti0HbkoryN3Hp+AvkT4XQZM8OnkFigmBy4XyUeg==" />
- </auto-check>
- </div>
-
- <div class="form-group">
- <label for="custom_scope_query">Query</label>
- <input
- type="text"
- name="custom_scope_query"
- id="custom_scope_query"
- data-target="custom-scopes.customScopesQueryField"
- class="form-control"
- autocomplete="off"
- placeholder="(repo:mona/a OR repo:mona/b) AND lang:python"
- required
- maxlength="500">
- </div>
-
- <p class="text-small color-fg-muted">
- To see all available qualifiers, see our <a class="Link--inTextBlock" href="https://docs.github.com/search-github/github-code-search/understanding-github-code-search-syntax">documentation</a>.
- </p>
-</form> </div>
-
- <div data-target="custom-scopes.manageCustomScopesForm">
- <div data-target="custom-scopes.list"></div>
- </div>
-
-</div>
- </scrollable-region>
- <div data-view-component="true" class="Overlay-footer Overlay-footer--alignEnd Overlay-footer--divided"> <button data-action="click:custom-scopes#customScopesCancel" type="button" data-view-component="true" class="btn"> Cancel
-</button>
- <button form="custom-scopes-dialog-form" data-action="click:custom-scopes#customScopesSubmit" data-target="custom-scopes.customScopesSubmitButton" type="submit" data-view-component="true" class="btn-primary btn"> Create saved search
-</button>
-</div>
-</dialog></dialog-helper>
- </custom-scopes>
- </div>
-</qbsearch-input><input type="hidden" data-csrf="true" class="js-data-jump-to-suggestions-path-csrf" value="RBVXfIk8vO4FJU4CkV5ubeEcbnRAZeyFaaaGfDFMkzfmGaTGcNcs3p/7x0+H68AC3o3LnXU7jHli/Oe0tZIh4Q==" />
-
-
- <div class="position-relative mr-lg-3 d-lg-inline-block">
- <a href="/login?return_to=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers"
- class="HeaderMenu-link HeaderMenu-link--sign-in flex-shrink-0 no-underline d-block d-lg-inline-block border border-lg-0 rounded rounded-lg-0 p-2 p-lg-0"
- data-hydro-click="{&quot;event_type&quot;:&quot;authentication.click&quot;,&quot;payload&quot;:{&quot;location_in_page&quot;:&quot;site header menu&quot;,&quot;repository_id&quot;:null,&quot;auth_type&quot;:&quot;SIGN_UP&quot;,&quot;originating_url&quot;:&quot;https://github.com/huggingface/transformers&quot;,&quot;user_id&quot;:null}}" data-hydro-click-hmac="043ed27e05630a128dc9f8f4154090025168c83e7ba31a796c74d6260c24f8af"
- data-ga-click="(Logged out) Header, clicked Sign in, text:sign-in">
- Sign in
- </a>
- </div>
-
- <a href="/signup?ref_cta=Sign+up&amp;ref_loc=header+logged+out&amp;ref_page=%2F%3Cuser-name%3E%2F%3Crepo-name%3E&amp;source=header-repo&amp;source_repo=huggingface%2Ftransformers"
- class="HeaderMenu-link HeaderMenu-link--sign-up flex-shrink-0 d-none d-lg-inline-block no-underline border color-border-default rounded px-2 py-1"
- data-hydro-click="{&quot;event_type&quot;:&quot;authentication.click&quot;,&quot;payload&quot;:{&quot;location_in_page&quot;:&quot;site header menu&quot;,&quot;repository_id&quot;:null,&quot;auth_type&quot;:&quot;SIGN_UP&quot;,&quot;originating_url&quot;:&quot;https://github.com/huggingface/transformers&quot;,&quot;user_id&quot;:null}}" data-hydro-click-hmac="043ed27e05630a128dc9f8f4154090025168c83e7ba31a796c74d6260c24f8af"
- data-analytics-event="{&quot;category&quot;:&quot;Sign up&quot;,&quot;action&quot;:&quot;click to sign up for account&quot;,&quot;label&quot;:&quot;ref_page:/&lt;user-name&gt;/&lt;repo-name&gt;;ref_cta:Sign up;ref_loc:header logged out&quot;}"
- >
- Sign up
- </a>
- </div>
- </div>
- </div>
- </div>
-</header>
-          🤗 Transformers: State-of-the-art Machine Learning for Pytorch, TensorFlow, and JAX.
-          huggingface.co/transformers · Apache-2.0 license · 123k stars · 24.5k forks
-
- <nav data-pjax="#js-repo-pjax-container" aria-label="Repository" data-view-component="true" class="js-repo-nav js-sidenav-container-pjax js-responsive-underlinenav overflow-hidden UnderlineNav px-3 px-md-4 px-lg-5">
-
- <ul data-view-component="true" class="UnderlineNav-body list-style-none">
- <li data-view-component="true" class="d-inline-flex">
- <a id="code-tab" href="/huggingface/transformers" data-tab-item="i0code-tab" data-selected-links="repo_source repo_downloads repo_commits repo_releases repo_tags repo_branches repo_packages repo_deployments repo_attestations /huggingface/transformers" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g c" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Code&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" aria-current="page" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item selected">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code UnderlineNav-octicon d-none d-sm-inline">
- <path d="m11.28 3.22 4.25 4.25a.75.75 0 0 1 0 1.06l-4.25 4.25a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L13.94 8l-3.72-3.72a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215Zm-6.56 0a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042L2.06 8l3.72 3.72a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L.47 8.53a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
- <span data-content="Code">Code</span>
- <span id="code-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="Not available" data-view-component="true" class="Counter"></span>
-
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="issues-tab" href="/huggingface/transformers/issues" data-tab-item="i1issues-tab" data-selected-links="repo_issues repo_labels repo_milestones /huggingface/transformers/issues" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g i" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Issues&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened UnderlineNav-octicon d-none d-sm-inline">
- <path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path><path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Z"></path>
-</svg>
- <span data-content="Issues">Issues</span>
- <span id="issues-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="794" data-view-component="true" class="Counter">794</span>
-
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="pull-requests-tab" href="/huggingface/transformers/pulls" data-tab-item="i2pull-requests-tab" data-selected-links="repo_pulls checks /huggingface/transformers/pulls" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g p" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Pull requests&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-pull-request UnderlineNav-octicon d-none d-sm-inline">
- <path d="M1.5 3.25a2.25 2.25 0 1 1 3 2.122v5.256a2.251 2.251 0 1 1-1.5 0V5.372A2.25 2.25 0 0 1 1.5 3.25Zm5.677-.177L9.573.677A.25.25 0 0 1 10 .854V2.5h1A2.5 2.5 0 0 1 13.5 5v5.628a2.251 2.251 0 1 1-1.5 0V5a1 1 0 0 0-1-1h-1v1.646a.25.25 0 0 1-.427.177L7.177 3.427a.25.25 0 0 1 0-.354ZM3.75 2.5a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Zm0 9.5a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Zm8.25.75a.75.75 0 1 0 1.5 0 .75.75 0 0 0-1.5 0Z"></path>
-</svg>
- <span data-content="Pull requests">Pull requests</span>
- <span id="pull-requests-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="259" data-view-component="true" class="Counter">259</span>
-
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="actions-tab" href="/huggingface/transformers/actions" data-tab-item="i3actions-tab" data-selected-links="repo_actions /huggingface/transformers/actions" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g a" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Actions&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play UnderlineNav-octicon d-none d-sm-inline">
- <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm4.879-2.773 4.264 2.559a.25.25 0 0 1 0 .428l-4.264 2.559A.25.25 0 0 1 6 10.559V5.442a.25.25 0 0 1 .379-.215Z"></path>
-</svg>
- <span data-content="Actions">Actions</span>
- <span id="actions-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="Not available" data-view-component="true" class="Counter"></span>
-
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="projects-tab" href="/huggingface/transformers/projects" data-tab-item="i4projects-tab" data-selected-links="repo_projects new_repo_project repo_project /huggingface/transformers/projects" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g b" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Projects&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-table UnderlineNav-octicon d-none d-sm-inline">
- <path d="M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25ZM6.5 6.5v8h7.75a.25.25 0 0 0 .25-.25V6.5Zm8-1.5V1.75a.25.25 0 0 0-.25-.25H6.5V5Zm-13 1.5v7.75c0 .138.112.25.25.25H5v-8ZM5 5V1.5H1.75a.25.25 0 0 0-.25.25V5Z"></path>
-</svg>
- <span data-content="Projects">Projects</span>
- <span id="projects-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="25" data-view-component="true" class="Counter">25</span>
-
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="security-tab" href="/huggingface/transformers/security" data-tab-item="i5security-tab" data-selected-links="security overview alerts policy token_scanning code_scanning /huggingface/transformers/security" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-hotkey="g s" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Security&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield UnderlineNav-octicon d-none d-sm-inline">
- <path d="M7.467.133a1.748 1.748 0 0 1 1.066 0l5.25 1.68A1.75 1.75 0 0 1 15 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.697 1.697 0 0 1-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 0 1 1.217-1.667Zm.61 1.429a.25.25 0 0 0-.153 0l-5.25 1.68a.25.25 0 0 0-.174.238V7c0 1.358.275 2.666 1.057 3.86.784 1.194 2.121 2.34 4.366 3.297a.196.196 0 0 0 .154 0c2.245-.956 3.582-2.104 4.366-3.298C13.225 9.666 13.5 8.36 13.5 7V3.48a.251.251 0 0 0-.174-.237l-5.25-1.68ZM8.75 4.75v3a.75.75 0 0 1-1.5 0v-3a.75.75 0 0 1 1.5 0ZM9 10.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path>
-</svg>
- <span data-content="Security">Security</span>
- <include-fragment src="/huggingface/transformers/security/overall-count" accept="text/fragment+html"></include-fragment>
-
-
-</a></li>
- <li data-view-component="true" class="d-inline-flex">
- <a id="insights-tab" href="/huggingface/transformers/pulse" data-tab-item="i6insights-tab" data-selected-links="repo_graphs repo_contributors dependency_graph dependabot_updates pulse people community /huggingface/transformers/pulse" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" data-analytics-event="{&quot;category&quot;:&quot;Underline navbar&quot;,&quot;action&quot;:&quot;Click tab&quot;,&quot;label&quot;:&quot;Insights&quot;,&quot;target&quot;:&quot;UNDERLINE_NAV.TAB&quot;}" data-view-component="true" class="UnderlineNav-item no-wrap js-responsive-underlinenav-item js-selected-navigation-item">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-graph UnderlineNav-octicon d-none d-sm-inline">
- <path d="M1.5 1.75V13.5h13.75a.75.75 0 0 1 0 1.5H.75a.75.75 0 0 1-.75-.75V1.75a.75.75 0 0 1 1.5 0Zm14.28 2.53-5.25 5.25a.75.75 0 0 1-1.06 0L7 7.06 4.28 9.78a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042l3.25-3.25a.75.75 0 0 1 1.06 0L10 7.94l4.72-4.72a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path>
-</svg>
- <span data-content="Insights">Insights</span>
- <span id="insights-repo-tab-count" data-pjax-replace="" data-turbo-replace="" title="Not available" data-view-component="true" class="Counter"></span>
-
-
-
-</a></li>
-</ul>
- <div style="visibility:hidden;" data-view-component="true" class="UnderlineNav-actions js-responsive-underlinenav-overflow position-absolute pr-3 pr-md-4 pr-lg-5 right-0"> <action-menu data-select-variant="none" data-view-component="true">
- <focus-group direction="vertical" mnemonics retain>
- <button id="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-button" popovertarget="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-overlay" aria-controls="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-list" aria-haspopup="true" aria-labelledby="tooltip-0ad90b7d-0588-40d9-9e83-79e6b685b605" type="button" data-view-component="true" class="Button Button--iconOnly Button--secondary Button--medium UnderlineNav-item"> <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-kebab-horizontal Button-visual">
- <path d="M8 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3ZM1.5 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Zm13 0a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path>
-</svg>
-</button><tool-tip id="tooltip-0ad90b7d-0588-40d9-9e83-79e6b685b605" for="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-button" popover="manual" data-direction="s" data-type="label" data-view-component="true" class="sr-only position-absolute">Additional navigation options</tool-tip>
-
-
-<anchored-position id="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-overlay" anchor="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-button" align="start" side="outside-bottom" anchor-offset="normal" popover="auto" data-view-component="true">
- <div data-view-component="true" class="Overlay Overlay--size-auto">
-
- <div data-view-component="true" class="Overlay-body Overlay-body--paddingNone"> <action-list>
- <div data-view-component="true">
- <ul aria-labelledby="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-button" id="action-menu-fd92d694-97bf-4274-b898-6bb6b85e5d98-list" role="menu" data-view-component="true" class="ActionListWrap--inset ActionListWrap">
- <li hidden="hidden" data-menu-item="i0code-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-327325a5-b7be-4540-898d-09214b18303f" href="/huggingface/transformers" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code">
- <path d="m11.28 3.22 4.25 4.25a.75.75 0 0 1 0 1.06l-4.25 4.25a.749.749 0 0 1-1.275-.326.749.749 0 0 1 .215-.734L13.94 8l-3.72-3.72a.749.749 0 0 1 .326-1.275.749.749 0 0 1 .734.215Zm-6.56 0a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042L2.06 8l3.72 3.72a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L.47 8.53a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Code
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i1issues-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-a80e637e-553a-4207-80ac-f0a119167d4a" href="/huggingface/transformers/issues" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-issue-opened">
- <path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z"></path><path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Issues
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i2pull-requests-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-a1fe5315-201a-4f0e-8440-c45c5fe9da78" href="/huggingface/transformers/pulls" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-git-pull-request">
- <path d="M1.5 3.25a2.25 2.25 0 1 1 3 2.122v5.256a2.251 2.251 0 1 1-1.5 0V5.372A2.25 2.25 0 0 1 1.5 3.25Zm5.677-.177L9.573.677A.25.25 0 0 1 10 .854V2.5h1A2.5 2.5 0 0 1 13.5 5v5.628a2.251 2.251 0 1 1-1.5 0V5a1 1 0 0 0-1-1h-1v1.646a.25.25 0 0 1-.427.177L7.177 3.427a.25.25 0 0 1 0-.354ZM3.75 2.5a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Zm0 9.5a.75.75 0 1 0 0 1.5.75.75 0 0 0 0-1.5Zm8.25.75a.75.75 0 1 0 1.5 0 .75.75 0 0 0-1.5 0Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Pull requests
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i3actions-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-db809349-3ecf-414f-b8b3-09af27497a2a" href="/huggingface/transformers/actions" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-play">
- <path d="M8 0a8 8 0 1 1 0 16A8 8 0 0 1 8 0ZM1.5 8a6.5 6.5 0 1 0 13 0 6.5 6.5 0 0 0-13 0Zm4.879-2.773 4.264 2.559a.25.25 0 0 1 0 .428l-4.264 2.559A.25.25 0 0 1 6 10.559V5.442a.25.25 0 0 1 .379-.215Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Actions
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i4projects-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-152a5da9-4ace-44dc-9c50-4488bba7e49e" href="/huggingface/transformers/projects" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-table">
- <path d="M0 1.75C0 .784.784 0 1.75 0h12.5C15.216 0 16 .784 16 1.75v12.5A1.75 1.75 0 0 1 14.25 16H1.75A1.75 1.75 0 0 1 0 14.25ZM6.5 6.5v8h7.75a.25.25 0 0 0 .25-.25V6.5Zm8-1.5V1.75a.25.25 0 0 0-.25-.25H6.5V5Zm-13 1.5v7.75c0 .138.112.25.25.25H5v-8ZM5 5V1.5H1.75a.25.25 0 0 0-.25.25V5Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Projects
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i5security-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-be421f87-6eb0-47e8-b193-9d8de654bbf0" href="/huggingface/transformers/security" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-shield">
- <path d="M7.467.133a1.748 1.748 0 0 1 1.066 0l5.25 1.68A1.75 1.75 0 0 1 15 3.48V7c0 1.566-.32 3.182-1.303 4.682-.983 1.498-2.585 2.813-5.032 3.855a1.697 1.697 0 0 1-1.33 0c-2.447-1.042-4.049-2.357-5.032-3.855C1.32 10.182 1 8.566 1 7V3.48a1.75 1.75 0 0 1 1.217-1.667Zm.61 1.429a.25.25 0 0 0-.153 0l-5.25 1.68a.25.25 0 0 0-.174.238V7c0 1.358.275 2.666 1.057 3.86.784 1.194 2.121 2.34 4.366 3.297a.196.196 0 0 0 .154 0c2.245-.956 3.582-2.104 4.366-3.298C13.225 9.666 13.5 8.36 13.5 7V3.48a.251.251 0 0 0-.174-.237l-5.25-1.68ZM8.75 4.75v3a.75.75 0 0 1-1.5 0v-3a.75.75 0 0 1 1.5 0ZM9 10.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Security
-</span></a>
-
-
-</li>
- <li hidden="hidden" data-menu-item="i6insights-tab" data-targets="action-list.items" role="none" data-view-component="true" class="ActionListItem">
-
- <a tabindex="-1" id="item-c54ba890-bc22-4d80-b970-63ed90b2690f" href="/huggingface/transformers/pulse" role="menuitem" data-view-component="true" class="ActionListContent ActionListContent--visual16">
- <span class="ActionListItem-visual ActionListItem-visual--leading">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-graph">
- <path d="M1.5 1.75V13.5h13.75a.75.75 0 0 1 0 1.5H.75a.75.75 0 0 1-.75-.75V1.75a.75.75 0 0 1 1.5 0Zm14.28 2.53-5.25 5.25a.75.75 0 0 1-1.06 0L7 7.06 4.28 9.78a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042l3.25-3.25a.75.75 0 0 1 1.06 0L10 7.94l4.72-4.72a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042Z"></path>
-</svg>
- </span>
-
- <span data-view-component="true" class="ActionListItem-label">
- Insights
-</span></a>
-
-
-</li>
-</ul>
-</div></action-list>
-
-
-</div>
-
-</div></anchored-position> </focus-group>
-</action-menu></div>
-</nav>
-
- </div>
-
-
-
-
-
-<turbo-frame id="repo-content-turbo-frame" target="_top" data-turbo-action="advance" class="">
- <div id="repo-content-pjax-container" class="repository-content " >
-
-
-
-
-
-
- <h1 class='sr-only'>huggingface/transformers</h1>
- <div class="clearfix container-xl px-md-4 px-lg-5 px-3">
- <div>
-
- <div id="spoof-warning" class="mt-0 pb-3" hidden aria-hidden>
- <div data-view-component="true" class="flash flash-warn mt-0 clearfix">
-
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert float-left mt-1">
- <path d="M6.457 1.047c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0 1 14.082 15H1.918a1.75 1.75 0 0 1-1.543-2.575Zm1.763.707a.25.25 0 0 0-.44 0L1.698 13.132a.25.25 0 0 0 .22.368h12.164a.25.25 0 0 0 .22-.368Zm.53 3.996v2.5a.75.75 0 0 1-1.5 0v-2.5a.75.75 0 0 1 1.5 0ZM9 11a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path>
-</svg>
-
- <div class="overflow-hidden">This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.</div>
-
-
-
-</div></div>
-
- <include-fragment src="/huggingface/transformers/spoofed_commit_check/5ad7f17002f304b1e880fe2333c7deba95d12f4e" data-test-selector="spoofed-commit-check"></include-fragment>
-
- <div style="max-width: 100%" data-view-component="true" class="Layout Layout--flowRow-until-md react-repos-overview-margin Layout--sidebarPosition-end Layout--sidebarPosition-flowRow-end">
- <div data-view-component="true" class="Layout-main">
-
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/react-lib-1fbfc5be2c18.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_octicons-react_dist_index_esm_js-node_modules_primer_react_lib-es-541a38-6ce7d7c3f9ee.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_Box_Box_js-8f8c5e2a2cbf.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_Button_Button_js-d5726d25c548.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_ActionList_index_js-1501d3ef83c2.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_Overlay_Overlay_js-node_modules_primer_react_lib-es-fa1130-829932cf63db.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_ActionMenu_ActionMenu_js-eaf74522e470.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_github_catalyst_lib_index_js-node_modules_github_hydro-analytics-client_-4da1df-9de8d527f925.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_react-router-dom_dist_index_js-3b41341d50fe.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_Dialog_js-node_modules_primer_react_lib-esm_TabNav_-8321f5-2969c7508f3a.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_UnderlineNav_index_js-89fa5806aa3c.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_AvatarStack_AvatarStack_js-node_modules_primer_reac-e8df95-53b8f38a33de.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/vendors-node_modules_primer_react_lib-esm_Dialog_Dialog_js-node_modules_primer_react_lib-esm_-114c24-1306e57fe74e.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/ui_packages_react-core_create-browser-history_ts-ui_packages_react-core_AppContextProvider_ts-809ab9-5bc018b15303.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/ui_packages_paths_index_ts-8a9f668f1de0.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/ui_packages_ref-selector_RefSelector_tsx-dbbdef4348e2.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/ui_packages_commit-attribution_index_ts-ui_packages_commit-checks-status_index_ts-ui_packages-a73d65-239b92c64d22.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/app_assets_modules_react-shared_hooks_use-canonical-object_ts-ui_packages_code-view-shared_ho-3e492a-cde4692d0c71.js"></script>
-<script crossorigin="anonymous" defer="defer" type="application/javascript" src="https://github.githubassets.com/assets/repos-overview-30410f2fe329.js"></script>
-
-<react-partial
- partial-name="repos-overview"
- data-ssr="false"
->
-
- <script type="application/json" data-target="react-partial.embeddedData">{"props":{"initialPayload":{"allShortcutsEnabled":false,"path":"/","repo":{"id":155220641,"defaultBranch":"main","name":"transformers","ownerLogin":"huggingface","currentUserCanPush":false,"isFork":false,"isEmpty":false,"createdAt":"2018-10-29T13:56:00.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/25720743?v=4","public":true,"private":false,"isOrgOwned":true},"currentUser":null,"refInfo":{"name":"main","listCacheKey":"v0:1711709481.0","canEdit":false,"refType":"branch","currentOid":"5ad7f17002f304b1e880fe2333c7deba95d12f4e"},"tree":{"items":[{"name":".circleci","path":".circleci","contentType":"directory"},{"name":".github","path":".github","contentType":"directory"},{"name":"docker","path":"docker","contentType":"directory"},{"name":"docs","path":"docs","contentType":"directory"},{"name":"examples","path":"examples","contentType":"directory"},{"name":"model_cards","path":"model_cards","contentType":"directory"},{"name":"notebooks","path":"notebooks","contentType":"directory"},{"name":"scripts","path":"scripts","contentType":"directory"},{"name":"src/transformers","path":"src/transformers","contentType":"directory","hasSimplifiedPath":true},{"name":"templates","path":"templates","contentType":"directory"},{"name":"tests","path":"tests","contentType":"directory"},{"name":"utils","path":"utils","contentType":"directory"},{"name":".coveragerc","path":".coveragerc","contentType":"file"},{"name":".gitattributes","path":".gitattributes","contentType":"file"},{"name":".gitignore","path":".gitignore","contentType":"file"},{"name":"CITATION.cff","path":"CITATION.cff","contentType":"file"},{"name":"CODE_OF_CONDUCT.md","path":"CODE_OF_CONDUCT.md","contentType":"file"},{"name":"CONTRIBUTING.md","path":"CONTRIBUTING.md","contentType":"file"},{"name":"ISSUES.md","path":"ISSUES.md","contentType":"file"},{"name":"LICENSE","path":"LICENSE","contentType":"file"},{"name":"Makefile","path":"Makefile","contentType":"file"},{"name":"README.md","path":"README.md","contentType":"file"},{"name":"README_de.md","path":"README_de.md","contentType":"file"},{"name":"README_es.md","path":"README_es.md","contentType":"file"},{"name":"README_fr.md","path":"README_fr.md","contentType":"file"},{"name":"README_hd.md","path":"README_hd.md","contentType":"file"},{"name":"README_ja.md","path":"README_ja.md","contentType":"file"},{"name":"README_ko.md","path":"README_ko.md","contentType":"file"},{"name":"README_pt-br.md","path":"README_pt-br.md","contentType":"file"},{"name":"README_ru.md","path":"README_ru.md","contentType":"file"},{"name":"README_te.md","path":"README_te.md","contentType":"file"},{"name":"README_vi.md","path":"README_vi.md","contentType":"file"},{"name":"README_zh-hans.md","path":"README_zh-hans.md","contentType":"file"},{"name":"README_zh-hant.md","path":"README_zh-hant.md","contentType":"file"},{"name":"SECURITY.md","path":"SECURITY.md","contentType":"file"},{"name":"awesome-transformers.md","path":"awesome-transformers.md","contentType":"file"},{"name":"conftest.py","path":"conftest.py","contentType":"file"},{"name":"hubconf.py","path":"hubconf.py","contentType":"file"},{"name":"pyproject.toml","path":"pyproject.toml","contentType":"file"},{"name":"setup.py","path":"setup.py","contentType":"file"}],"templateDirectorySuggestionUrl":null,"readme":null,"totalCount":40,"showBranchInfobar":false},"fileTree":null,"fileTreeProcessingTime":null,"foldersToFetch":[],"treeExpanded":false,"symbolsExpanded":false,"isOverview":true,"
overview":{"banners":{"shouldRecommendReadme":false,"isPersonalRepo":false,"showUseActionBanner":false,"actionSlug":null,"actionId":null,"showProtectBranchBanner":false,"recentlyTouchedDataChannel":null,"publishBannersInfo":{"dismissActionNoticePath":"/settings/dismiss-notice/publish_action_from_repo","releasePath":"/huggingface/transformers/releases/new?marketplace=true","showPublishActionBanner":false},"interactionLimitBanner":null,"showInvitationBanner":false,"inviterName":null},"codeButton":{"contactPath":"/contact","isEnterprise":false,"local":{"protocolInfo":{"httpAvailable":true,"sshAvailable":null,"httpUrl":"https://github.com/huggingface/transformers.git","showCloneWarning":null,"sshUrl":null,"sshCertificatesRequired":null,"sshCertificatesAvailable":null,"ghCliUrl":"gh repo clone huggingface/transformers","defaultProtocol":"http","newSshKeyUrl":"/settings/ssh/new","setProtocolPath":"/users/set_protocol"},"platformInfo":{"cloneUrl":"https://desktop.github.com","showVisualStudioCloneButton":false,"visualStudioCloneUrl":"https://windows.github.com","showXcodeCloneButton":false,"xcodeCloneUrl":"https://developer.apple.com","zipballUrl":"/huggingface/transformers/archive/refs/heads/main.zip"}},"newCodespacePath":"/codespaces/new?hide_repo_select=true\u0026repo=155220641"},"popovers":{"rename":null,"renamedParentRepo":null},"commitCount":"15,475","overviewFiles":[{"displayName":"README.md","repoName":"transformers","refName":"main","path":"README.md","preferredFileType":"readme","tabName":"README","richText":"\u003carticle class=\"markdown-body entry-content container-lg\" itemprop=\"text\"\u003e\n\u003cp align=\"center\" dir=\"auto\"\u003e\n \u003cthemed-picture data-catalyst-inline=\"true\"\u003e\u003cpicture\u003e\n \u003csource media=\"(prefers-color-scheme: dark)\" srcset=\"https://camo.githubusercontent.com/b0242aba08cbaf9670cb3f59bb946606c709c80409bf20141a01ded0f889d0b0/68747470733a2f2f68756767696e67666163652e636f2f64617461736574732f68756767696e67666163652f646f63756d656e746174696f6e2d696d616765732f7261772f6d61696e2f7472616e73666f726d6572732d6c6f676f2d6461726b2e737667\" data-canonical-src=\"https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-dark.svg\"\u003e\n \u003csource media=\"(prefers-color-scheme: light)\" srcset=\"https://camo.githubusercontent.com/19694a747faa4c55cbdb1cab99086099c6cf961930712f87ab3469e9bf706a4f/68747470733a2f2f68756767696e67666163652e636f2f64617461736574732f68756767696e67666163652f646f63756d656e746174696f6e2d696d616765732f7261772f6d61696e2f7472616e73666f726d6572732d6c6f676f2d6c696768742e737667\" data-canonical-src=\"https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg\"\u003e\n \u003cimg alt=\"Hugging Face Transformers Library\" src=\"https://camo.githubusercontent.com/19694a747faa4c55cbdb1cab99086099c6cf961930712f87ab3469e9bf706a4f/68747470733a2f2f68756767696e67666163652e636f2f64617461736574732f68756767696e67666163652f646f63756d656e746174696f6e2d696d616765732f7261772f6d61696e2f7472616e73666f726d6572732d6c6f676f2d6c696768742e737667\" width=\"352\" height=\"59\" style=\"max-width: 100%;\" data-canonical-src=\"https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg\"\u003e\n \u003c/picture\u003e\u003c/themed-picture\u003e\n \u003cbr\u003e\n \u003cbr\u003e\n\u003c/p\u003e\n\u003cp align=\"center\" dir=\"auto\"\u003e\n \u003ca href=\"https://circleci.com/gh/huggingface/transformers\" rel=\"nofollow\"\u003e\n \u003cimg 
alt=\"Build\" src=\"https://camo.githubusercontent.com/7e0d9e9f10088f3210281bc600989392d6784232aac30c653e2fabbc5bd7a2f0/68747470733a2f2f696d672e736869656c64732e696f2f636972636c6563692f6275696c642f6769746875622f68756767696e67666163652f7472616e73666f726d6572732f6d61696e\" data-canonical-src=\"https://img.shields.io/circleci/build/github/huggingface/transformers/main\" style=\"max-width: 100%;\"\u003e\n \u003c/a\u003e\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/LICENSE\"\u003e\n \u003cimg alt=\"GitHub\" src=\"https://camo.githubusercontent.com/d0afd99731b850439b62e2551826aa4c3b32369f8c15c8f0b02ad2277479c242/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f6c6963656e73652f68756767696e67666163652f7472616e73666f726d6572732e7376673f636f6c6f723d626c7565\" data-canonical-src=\"https://img.shields.io/github/license/huggingface/transformers.svg?color=blue\" style=\"max-width: 100%;\"\u003e\n \u003c/a\u003e\n \u003ca href=\"https://huggingface.co/docs/transformers/index\" rel=\"nofollow\"\u003e\n \u003cimg alt=\"Documentation\" src=\"https://camo.githubusercontent.com/6f3767a6d933301807cdfd2c597a101a8a446815060455f9421336bd2217696f/68747470733a2f2f696d672e736869656c64732e696f2f776562736974652f687474702f68756767696e67666163652e636f2f646f63732f7472616e73666f726d6572732f696e6465782e7376673f646f776e5f636f6c6f723d72656426646f776e5f6d6573736167653d6f66666c696e652675705f6d6573736167653d6f6e6c696e65\" data-canonical-src=\"https://img.shields.io/website/http/huggingface.co/docs/transformers/index.svg?down_color=red\u0026amp;down_message=offline\u0026amp;up_message=online\" style=\"max-width: 100%;\"\u003e\n \u003c/a\u003e\n \u003ca href=\"https://github.com/huggingface/transformers/releases\"\u003e\n \u003cimg alt=\"GitHub release\" src=\"https://camo.githubusercontent.com/382e88f4824cc860ca2461982aaa461a7e6546fe6e9e929043511b80f3cf0662/68747470733a2f2f696d672e736869656c64732e696f2f6769746875622f72656c656173652f68756767696e67666163652f7472616e73666f726d6572732e737667\" data-canonical-src=\"https://img.shields.io/github/release/huggingface/transformers.svg\" style=\"max-width: 100%;\"\u003e\n \u003c/a\u003e\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/CODE_OF_CONDUCT.md\"\u003e\n \u003cimg alt=\"Contributor Covenant\" src=\"https://camo.githubusercontent.com/20fe195dfecc3508e105ec04e6a1acea97bd409201ac6ca09c07942c8a8e4ad2/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f436f6e7472696275746f72253230436f76656e616e742d76322e3025323061646f707465642d6666363962342e737667\" data-canonical-src=\"https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg\" style=\"max-width: 100%;\"\u003e\n \u003c/a\u003e\n \u003ca href=\"https://zenodo.org/badge/latestdoi/155220641\" rel=\"nofollow\"\u003e\u003cimg src=\"https://camo.githubusercontent.com/517af209a5e04322ac7dd3e2b7091245cea97f513683288ee23956536bdc5b8f/68747470733a2f2f7a656e6f646f2e6f72672f62616467652f3135353232303634312e737667\" alt=\"DOI\" data-canonical-src=\"https://zenodo.org/badge/155220641.svg\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch4 align=\"center\" tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e\n \u003cp dir=\"auto\"\u003e\n \u003cb\u003eEnglish\u003c/b\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_zh-hans.md\"\u003e简体中文\u003c/a\u003e |\n \u003ca 
href=\"https://github.com/huggingface/transformers/blob/main/README_zh-hant.md\"\u003e繁體中文\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_ko.md\"\u003e한국어\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_es.md\"\u003eEspañol\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_ja.md\"\u003e日本語\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_hd.md\"\u003eहिन्दी\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_ru.md\"\u003eРусский\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_pt-br.md\"\u003eРortuguês\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_te.md\"\u003eతెలుగు\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_fr.md\"\u003eFrançais\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_de.md\"\u003eDeutsch\u003c/a\u003e |\n \u003ca href=\"https://github.com/huggingface/transformers/blob/main/README_vi.md\"\u003eTiếng Việt\u003c/a\u003e |\n \u003c/p\u003e\n\u003c/h4\u003e\u003ca id=\"user-content-------------english---------简体中文---------繁體中文---------한국어---------español---------日本語---------हिन्दी---------русский---------рortuguês---------తెలుగు---------français---------deutsch---------tiếng-việt-----\" class=\"anchor\" aria-label=\"Permalink: English |\n 简体中文 |\n 繁體中文 |\n 한국어 |\n Español |\n 日本語 |\n हिन्दी |\n Русский |\n Рortuguês |\n తెలుగు |\n Français |\n Deutsch |\n Tiếng Việt |\n \" href=\"#------------english---------简体中文---------繁體中文---------한국어---------español---------日本語---------हिन्दी---------русский---------рortuguês---------తెలుగు---------français---------deutsch---------tiếng-việt-----\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 align=\"center\" tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e\n \u003cp dir=\"auto\"\u003eState-of-the-art Machine Learning for JAX, PyTorch and TensorFlow\u003c/p\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-----state-of-the-art-machine-learning-for-jax-pytorch-and-tensorflow\" class=\"anchor\" aria-label=\"Permalink: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow\" href=\"#----state-of-the-art-machine-learning-for-jax-pytorch-and-tensorflow\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 
0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch3 align=\"center\" tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003e\n \u003ca href=\"https://hf.co/course\" rel=\"nofollow\"\u003e\u003cimg src=\"https://camo.githubusercontent.com/ad749d27e199f2237320a760fca37d98b14f72b9e3c855c1a50b733fd3ba4a87/68747470733a2f2f68756767696e67666163652e636f2f64617461736574732f68756767696e67666163652f646f63756d656e746174696f6e2d696d616765732f7265736f6c76652f6d61696e2f636f757273655f62616e6e65722e706e67\" data-canonical-src=\"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/course_banner.png\" style=\"max-width: 100%;\"\u003e\u003c/a\u003e\n\u003c/h3\u003e\u003ca id=\"user-content-----\" class=\"anchor\" aria-label=\"Permalink: \" href=\"#----\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003e🤗 Transformers provides thousands of pretrained models to perform tasks on different modalities such as text, vision, and audio.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eThese models can be applied on:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e📝 Text, for tasks like text classification, information extraction, question answering, summarization, translation, and text generation, in over 100 languages.\u003c/li\u003e\n\u003cli\u003e🖼️ Images, for tasks like image classification, object detection, and segmentation.\u003c/li\u003e\n\u003cli\u003e🗣️ Audio, for tasks like speech recognition and audio classification.\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp dir=\"auto\"\u003eTransformer models can also perform tasks on \u003cstrong\u003eseveral modalities combined\u003c/strong\u003e, such as table question answering, optical character recognition, information extraction from scanned documents, video classification, and visual question answering.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e🤗 Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets and then share them with the community on our \u003ca href=\"https://huggingface.co/models\" rel=\"nofollow\"\u003emodel hub\u003c/a\u003e. 
At the same time, each python module defining an architecture is fully standalone and can be modified to enable quick research experiments.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003e🤗 Transformers is backed by the three most popular deep learning libraries — \u003ca href=\"https://jax.readthedocs.io/en/latest/\" rel=\"nofollow\"\u003eJax\u003c/a\u003e, \u003ca href=\"https://pytorch.org/\" rel=\"nofollow\"\u003ePyTorch\u003c/a\u003e and \u003ca href=\"https://www.tensorflow.org/\" rel=\"nofollow\"\u003eTensorFlow\u003c/a\u003e — with a seamless integration between them. It's straightforward to train your models with one before loading them for inference with the other.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003eOnline demos\u003c/h2\u003e\u003ca id=\"user-content-online-demos\" class=\"anchor\" aria-label=\"Permalink: Online demos\" href=\"#online-demos\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003eYou can test most of our models directly on their pages from the \u003ca href=\"https://huggingface.co/models\" rel=\"nofollow\"\u003emodel hub\u003c/a\u003e. 
We also offer \u003ca href=\"https://huggingface.co/pricing\" rel=\"nofollow\"\u003eprivate model hosting, versioning, \u0026amp; an inference API\u003c/a\u003e for public and private models.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eHere are a few examples:\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eIn Natural Language Processing:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/google-bert/bert-base-uncased?text=Paris+is+the+%5BMASK%5D+of+France\" rel=\"nofollow\"\u003eMasked word completion with BERT\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/dbmdz/electra-large-discriminator-finetuned-conll03-english?text=My+name+is+Sarah+and+I+live+in+London+city\" rel=\"nofollow\"\u003eNamed Entity Recognition with Electra\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\" rel=\"nofollow\"\u003eText generation with Mistral\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/FacebookAI/roberta-large-mnli?text=The+dog+was+lost.+Nobody+lost+any+animal\" rel=\"nofollow\"\u003eNatural Language Inference with RoBERTa\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/facebook/bart-large-cnn?text=The+tower+is+324+metres+%281%2C063+ft%29+tall%2C+about+the+same+height+as+an+81-storey+building%2C+and+the+tallest+structure+in+Paris.+Its+base+is+square%2C+measuring+125+metres+%28410+ft%29+on+each+side.+During+its+construction%2C+the+Eiffel+Tower+surpassed+the+Washington+Monument+to+become+the+tallest+man-made+structure+in+the+world%2C+a+title+it+held+for+41+years+until+the+Chrysler+Building+in+New+York+City+was+finished+in+1930.+It+was+the+first+structure+to+reach+a+height+of+300+metres.+Due+to+the+addition+of+a+broadcasting+aerial+at+the+top+of+the+tower+in+1957%2C+it+is+now+taller+than+the+Chrysler+Building+by+5.2+metres+%2817+ft%29.+Excluding+transmitters%2C+the+Eiffel+Tower+is+the+second+tallest+free-standing+structure+in+France+after+the+Millau+Viaduct\" rel=\"nofollow\"\u003eSummarization with BART\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/distilbert/distilbert-base-uncased-distilled-squad?text=Which+name+is+also+used+to+describe+the+Amazon+rainforest+in+English%3F\u0026amp;context=The+Amazon+rainforest+%28Portuguese%3A+Floresta+Amaz%C3%B4nica+or+Amaz%C3%B4nia%3B+Spanish%3A+Selva+Amaz%C3%B3nica%2C+Amazon%C3%ADa+or+usually+Amazonia%3B+French%3A+For%C3%AAt+amazonienne%3B+Dutch%3A+Amazoneregenwoud%29%2C+also+known+in+English+as+Amazonia+or+the+Amazon+Jungle%2C+is+a+moist+broadleaf+forest+that+covers+most+of+the+Amazon+basin+of+South+America.+This+basin+encompasses+7%2C000%2C000+square+kilometres+%282%2C700%2C000+sq+mi%29%2C+of+which+5%2C500%2C000+square+kilometres+%282%2C100%2C000+sq+mi%29+are+covered+by+the+rainforest.+This+region+includes+territory+belonging+to+nine+nations.+The+majority+of+the+forest+is+contained+within+Brazil%2C+with+60%25+of+the+rainforest%2C+followed+by+Peru+with+13%25%2C+Colombia+with+10%25%2C+and+with+minor+amounts+in+Venezuela%2C+Ecuador%2C+Bolivia%2C+Guyana%2C+Suriname+and+French+Guiana.+States+or+departments+in+four+nations+contain+%22Amazonas%22+in+their+names.+The+Amazon+represents+over+half+of+the+planet%27s+remaining+rainforests%2C+and+comprises+the+largest+and+most+biodiverse+tract+of+tropical+rainforest+in+the+world%2C+with+an+estimated+390+billion+individual+trees+divided+into+16%2C000+species\" rel=\"nofollow\"\u003eQuestion 
answering with DistilBERT\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/google-t5/t5-base?text=My+name+is+Wolfgang+and+I+live+in+Berlin\" rel=\"nofollow\"\u003eTranslation with T5\u003c/a\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp dir=\"auto\"\u003eIn Computer Vision:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/google/vit-base-patch16-224\" rel=\"nofollow\"\u003eImage classification with ViT\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/facebook/detr-resnet-50\" rel=\"nofollow\"\u003eObject Detection with DETR\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512\" rel=\"nofollow\"\u003eSemantic Segmentation with SegFormer\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/facebook/mask2former-swin-large-coco-panoptic\" rel=\"nofollow\"\u003ePanoptic Segmentation with Mask2Former\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/docs/transformers/main/model_doc/depth_anything\" rel=\"nofollow\"\u003eDepth Estimation with Depth Anything\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/videomae\" rel=\"nofollow\"\u003eVideo Classification with VideoMAE\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/shi-labs/oneformer_ade20k_dinat_large\" rel=\"nofollow\"\u003eUniversal Segmentation with OneFormer\u003c/a\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp dir=\"auto\"\u003eIn Audio:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/openai/whisper-large-v3\" rel=\"nofollow\"\u003eAutomatic Speech Recognition with Whisper\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/superb/wav2vec2-base-superb-ks\" rel=\"nofollow\"\u003eKeyword Spotting with Wav2Vec2\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/MIT/ast-finetuned-audioset-10-10-0.4593\" rel=\"nofollow\"\u003eAudio Classification with Audio Spectrogram Transformer\u003c/a\u003e\u003c/li\u003e\n\u003c/ul\u003e\n\u003cp dir=\"auto\"\u003eIn Multimodal tasks:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/google/tapas-base-finetuned-wtq\" rel=\"nofollow\"\u003eTable Question Answering with TAPAS\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/dandelin/vilt-b32-finetuned-vqa\" rel=\"nofollow\"\u003eVisual Question Answering with ViLT\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/llava-hf/llava-1.5-7b-hf\" rel=\"nofollow\"\u003eImage captioning with LLaVa\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/google/siglip-so400m-patch14-384\" rel=\"nofollow\"\u003eZero-shot Image Classification with SigLIP\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/impira/layoutlm-document-qa\" rel=\"nofollow\"\u003eDocument Question Answering with LayoutLM\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xclip\" rel=\"nofollow\"\u003eZero-shot Video Classification with X-CLIP\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003ca href=\"https://huggingface.co/docs/transformers/en/model_doc/owlv2\" rel=\"nofollow\"\u003eZero-shot Object Detection with 
OWLv2
- [Zero-shot Image Segmentation with CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg)
- [Automatic Mask Generation with SAM](https://huggingface.co/docs/transformers/model_doc/sam)

## 100 projects using Transformers

Transformers is more than a toolkit to use pretrained models: it's a community of projects built around it and the
Hugging Face Hub. We want Transformers to enable developers, researchers, students, professors, engineers, and anyone
else to build their dream projects.

In order to celebrate the 100,000 stars of transformers, we have decided to put the spotlight on the
community, and we have created the [awesome-transformers](https://github.com/huggingface/transformers/blob/main/awesome-transformers.md) page which lists 100
incredible projects built in the vicinity of transformers.

If you own or use a project that you believe should be part of the list, please open a PR to add it!

## If you are looking for custom support from the Hugging Face team

<a target="_blank" href="https://huggingface.co/support">
    <img alt="HuggingFace Expert Acceleration Program" src="https://cdn-media.huggingface.co/marketing/transformers/new-support-improved.png">
</a><br>

## Quick tour

To immediately use a model on a given input (text, image, audio, ...), we provide the `pipeline` API. Pipelines group together a pretrained model with the preprocessing that was used during that model's training. Here is how to quickly use a pipeline to classify positive versus negative texts:

```python
>>> from transformers import pipeline

# Allocate a pipeline for sentiment-analysis
>>> classifier = pipeline('sentiment-analysis')
>>> classifier('We are very happy to introduce pipeline to the transformers repository.')
[{'label': 'POSITIVE', 'score': 0.9996980428695679}]
```

The second line of code downloads and caches the pretrained model used by the pipeline, while the third evaluates it on the given text. Here, the answer is "positive" with a confidence of 99.97%.
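
If you need reproducible behavior, you can also pin the exact checkpoint the pipeline loads instead of relying on the task's default model. A minimal sketch (the checkpoint id below is an illustrative choice, not one prescribed above, and the score is elided because it depends on the model version):

```python
>>> from transformers import pipeline

# Pin a specific checkpoint instead of the task's default model
>>> classifier = pipeline(
...     "sentiment-analysis",
...     model="distilbert/distilbert-base-uncased-finetuned-sst-2-english",
... )
>>> classifier("We are very happy to introduce pipeline to the transformers repository.")
[{'label': 'POSITIVE', 'score': ...}]
```
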

Many tasks have a pre-trained `pipeline` ready to go, in NLP but also in computer vision and speech. For example, we can easily extract detected objects in an image:

```python
>>> import requests
>>> from PIL import Image
>>> from transformers import pipeline

# Download an image with cute cats
>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png"
>>> image_data = requests.get(url, stream=True).raw
>>> image = Image.open(image_data)

# Allocate a pipeline for object detection
>>> object_detector = pipeline('object-detection')
>>> object_detector(image)
[{'score': 0.9982201457023621,
  'label': 'remote',
  'box': {'xmin': 40, 'ymin': 70, 'xmax': 175, 'ymax': 117}},
 {'score': 0.9960021376609802,
  'label': 'remote',
  'box': {'xmin': 333, 'ymin': 72, 'xmax': 368, 'ymax': 187}},
 {'score': 0.9954745173454285,
  'label': 'couch',
  'box': {'xmin': 0, 'ymin': 1, 'xmax': 639, 'ymax': 473}},
 {'score': 0.9988006353378296,
  'label': 'cat',
  'box': {'xmin': 13, 'ymin': 52, 'xmax': 314, 'ymax': 470}},
 {'score': 0.9986783862113953,
  'label': 'cat',
  'box': {'xmin': 345, 'ymin': 23, 'xmax': 640, 'ymax': 368}}]
```

Here, we get a list of objects detected in the image, with a box surrounding the object and a confidence score. Here is the original image on the left, with the predictions displayed on the right:

<h3 align="center">
    <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png" width="400">
    <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample_post_processed.png" width="400">
</h3>

You can learn more about the tasks supported by the `pipeline` API in [this tutorial](https://huggingface.co/docs/transformers/task_summary).
aria-label=\"Permalink: \" href=\"#--------\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003eYou can learn more about the tasks supported by the \u003ccode\u003epipeline\u003c/code\u003e API in \u003ca href=\"https://huggingface.co/docs/transformers/task_summary\" rel=\"nofollow\"\u003ethis tutorial\u003c/a\u003e.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eIn addition to \u003ccode\u003epipeline\u003c/code\u003e, to download and use any of the pretrained models on your given task, all it takes is three lines of code. Here is the PyTorch version:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-python notranslate position-relative overflow-auto\" dir=\"auto\" data-snippet-clipboard-copy-content=\"\u0026gt;\u0026gt;\u0026gt; from transformers import AutoTokenizer, AutoModel\n\n\u0026gt;\u0026gt;\u0026gt; tokenizer = AutoTokenizer.from_pretrained(\u0026quot;google-bert/bert-base-uncased\u0026quot;)\n\u0026gt;\u0026gt;\u0026gt; model = AutoModel.from_pretrained(\u0026quot;google-bert/bert-base-uncased\u0026quot;)\n\n\u0026gt;\u0026gt;\u0026gt; inputs = tokenizer(\u0026quot;Hello world!\u0026quot;, return_tensors=\u0026quot;pt\u0026quot;)\n\u0026gt;\u0026gt;\u0026gt; outputs = model(**inputs)\"\u003e\u003cpre\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-k\"\u003efrom\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003etransformers\u003c/span\u003e \u003cspan class=\"pl-k\"\u003eimport\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eAutoTokenizer\u003c/span\u003e, \u003cspan class=\"pl-v\"\u003eAutoModel\u003c/span\u003e\n\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003etokenizer\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eAutoTokenizer\u003c/span\u003e.\u003cspan class=\"pl-en\"\u003efrom_pretrained\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"google-bert/bert-base-uncased\"\u003c/span\u003e)\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003emodel\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eAutoModel\u003c/span\u003e.\u003cspan class=\"pl-en\"\u003efrom_pretrained\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"google-bert/bert-base-uncased\"\u003c/span\u003e)\n\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003einputs\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan 
class=\"pl-en\"\u003etokenizer\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"Hello world!\"\u003c/span\u003e, \u003cspan class=\"pl-s1\"\u003ereturn_tensors\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e\u003cspan class=\"pl-s\"\u003e\"pt\"\u003c/span\u003e)\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003eoutputs\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-en\"\u003emodel\u003c/span\u003e(\u003cspan class=\"pl-c1\"\u003e**\u003c/span\u003e\u003cspan class=\"pl-s1\"\u003einputs\u003c/span\u003e)\u003c/pre\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003eAnd here is the equivalent code for TensorFlow:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-source-python notranslate position-relative overflow-auto\" dir=\"auto\" data-snippet-clipboard-copy-content=\"\u0026gt;\u0026gt;\u0026gt; from transformers import AutoTokenizer, TFAutoModel\n\n\u0026gt;\u0026gt;\u0026gt; tokenizer = AutoTokenizer.from_pretrained(\u0026quot;google-bert/bert-base-uncased\u0026quot;)\n\u0026gt;\u0026gt;\u0026gt; model = TFAutoModel.from_pretrained(\u0026quot;google-bert/bert-base-uncased\u0026quot;)\n\n\u0026gt;\u0026gt;\u0026gt; inputs = tokenizer(\u0026quot;Hello world!\u0026quot;, return_tensors=\u0026quot;tf\u0026quot;)\n\u0026gt;\u0026gt;\u0026gt; outputs = model(**inputs)\"\u003e\u003cpre\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-k\"\u003efrom\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003etransformers\u003c/span\u003e \u003cspan class=\"pl-k\"\u003eimport\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eAutoTokenizer\u003c/span\u003e, \u003cspan class=\"pl-v\"\u003eTFAutoModel\u003c/span\u003e\n\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003etokenizer\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eAutoTokenizer\u003c/span\u003e.\u003cspan class=\"pl-en\"\u003efrom_pretrained\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"google-bert/bert-base-uncased\"\u003c/span\u003e)\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003emodel\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-v\"\u003eTFAutoModel\u003c/span\u003e.\u003cspan class=\"pl-en\"\u003efrom_pretrained\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"google-bert/bert-base-uncased\"\u003c/span\u003e)\n\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003einputs\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e \u003cspan class=\"pl-en\"\u003etokenizer\u003c/span\u003e(\u003cspan class=\"pl-s\"\u003e\"Hello world!\"\u003c/span\u003e, \u003cspan class=\"pl-s1\"\u003ereturn_tensors\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e\u003cspan class=\"pl-s\"\u003e\"tf\"\u003c/span\u003e)\n\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u0026gt;\u003c/span\u003e\u003cspan class=\"pl-c1\"\u003e\u0026gt;\u003c/span\u003e \u003cspan class=\"pl-s1\"\u003eoutputs\u003c/span\u003e \u003cspan class=\"pl-c1\"\u003e=\u003c/span\u003e 
\u003cspan class=\"pl-en\"\u003emodel\u003c/span\u003e(\u003cspan class=\"pl-c1\"\u003e**\u003c/span\u003e\u003cspan class=\"pl-s1\"\u003einputs\u003c/span\u003e)\u003c/pre\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003eThe tokenizer is responsible for all the preprocessing the pretrained model expects and can be called directly on a single string (as in the above examples) or a list. It will output a dictionary that you can use in downstream code or simply directly pass to your model using the ** argument unpacking operator.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eThe model itself is a regular \u003ca href=\"https://pytorch.org/docs/stable/nn.html#torch.nn.Module\" rel=\"nofollow\"\u003ePytorch \u003ccode\u003enn.Module\u003c/code\u003e\u003c/a\u003e or a \u003ca href=\"https://www.tensorflow.org/api_docs/python/tf/keras/Model\" rel=\"nofollow\"\u003eTensorFlow \u003ccode\u003etf.keras.Model\u003c/code\u003e\u003c/a\u003e (depending on your backend) which you can use as usual. \u003ca href=\"https://huggingface.co/docs/transformers/training\" rel=\"nofollow\"\u003eThis tutorial\u003c/a\u003e explains how to integrate such a model into a classic PyTorch or TensorFlow training loop, or how to use our \u003ccode\u003eTrainer\u003c/code\u003e API to quickly fine-tune on a new dataset.\u003c/p\u003e\n\u003cdiv class=\"markdown-heading\" dir=\"auto\"\u003e\u003ch2 tabindex=\"-1\" class=\"heading-element\" dir=\"auto\"\u003eWhy should I use transformers?\u003c/h2\u003e\u003ca id=\"user-content-why-should-i-use-transformers\" class=\"anchor\" aria-label=\"Permalink: Why should I use transformers?\" href=\"#why-should-i-use-transformers\"\u003e\u003csvg class=\"octicon octicon-link\" viewBox=\"0 0 16 16\" version=\"1.1\" width=\"16\" height=\"16\" aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003col dir=\"auto\"\u003e\n\u003cli\u003e\n\u003cp dir=\"auto\"\u003eEasy-to-use state-of-the-art models:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003eHigh performance on natural language understanding \u0026amp; generation, computer vision, and audio tasks.\u003c/li\u003e\n\u003cli\u003eLow barrier to entry for educators and practitioners.\u003c/li\u003e\n\u003cli\u003eFew user-facing abstractions with just three classes to learn.\u003c/li\u003e\n\u003cli\u003eA unified API for using all our pretrained models.\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp dir=\"auto\"\u003eLower compute costs, smaller carbon footprint:\u003c/p\u003e\n\u003cul dir=\"auto\"\u003e\n\u003cli\u003eResearchers can share trained models instead of always retraining.\u003c/li\u003e\n\u003cli\u003ePractitioners can reduce compute time and production costs.\u003c/li\u003e\n\u003cli\u003eDozens of architectures with over 400,000 pretrained models across all modalities.\u003c/li\u003e\n\u003c/ul\u003e\n\u003c/li\u003e\n\u003cli\u003e\n\u003cp dir=\"auto\"\u003eChoose the right framework for every part of a model's 

The model itself is a regular [PyTorch `nn.Module`](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) or a [TensorFlow `tf.keras.Model`](https://www.tensorflow.org/api_docs/python/tf/keras/Model) (depending on your backend) which you can use as usual. [This tutorial](https://huggingface.co/docs/transformers/training) explains how to integrate such a model into a classic PyTorch or TensorFlow training loop, or how to use our `Trainer` API to quickly fine-tune on a new dataset.
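
As a rough sketch of the `Trainer` path (the dataset, checkpoint, subset sizes and hyperparameters below are arbitrary choices for illustration, not recommendations from this README, and the snippet additionally relies on the 🤗 Datasets library):

```python
from datasets import load_dataset
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

# Illustrative dataset and checkpoint choices
dataset = load_dataset("imdb")
tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")

def tokenize(batch):
    # Truncate to the model's maximum length; padding is applied per batch by the Trainer
    return tokenizer(batch["text"], truncation=True)

tokenized = dataset.map(tokenize, batched=True)

model = AutoModelForSequenceClassification.from_pretrained(
    "google-bert/bert-base-uncased", num_labels=2
)

training_args = TrainingArguments(output_dir="imdb-finetune", num_train_epochs=1)

trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=tokenized["train"].shuffle(seed=42).select(range(1000)),
    eval_dataset=tokenized["test"].shuffle(seed=42).select(range(1000)),
    tokenizer=tokenizer,  # enables dynamic padding via the default data collator
)
trainer.train()
```

If you would rather keep full control, the same tokenized datasets can be fed to your own PyTorch or TensorFlow training loop, as the tutorial linked above describes.
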

## Why should I use transformers?

1. Easy-to-use state-of-the-art models:
    - High performance on natural language understanding & generation, computer vision, and audio tasks.
    - Low barrier to entry for educators and practitioners.
    - Few user-facing abstractions with just three classes to learn.
    - A unified API for using all our pretrained models.

1. Lower compute costs, smaller carbon footprint:
    - Researchers can share trained models instead of always retraining.
    - Practitioners can reduce compute time and production costs.
    - Dozens of architectures with over 400,000 pretrained models across all modalities.

1. Choose the right framework for every part of a model's lifetime:
    - Train state-of-the-art models in 3 lines of code.
    - Move a single model between TF2.0/PyTorch/JAX frameworks at will.
    - Seamlessly pick the right framework for training, evaluation, and production.

1. Easily customize a model or an example to your needs:
    - We provide examples for each architecture to reproduce the results published by its original authors.
    - Model internals are exposed as consistently as possible.
    - Model files can be used independently of the library for quick experiments.

## Why shouldn't I use transformers?

- This library is not a modular toolbox of building blocks for neural nets. The code in the model files is not refactored with additional abstractions on purpose, so that researchers can quickly iterate on each of the models without diving into additional abstractions/files.
- The training API is not intended to work on any model but is optimized to work with the models provided by the library. For generic machine learning loops, you should use another library (possibly, [Accelerate](https://huggingface.co/docs/accelerate)).
- While we strive to present as many use cases as possible, the scripts in our [examples folder](https://github.com/huggingface/transformers/tree/main/examples) are just that: examples. It is expected that they won't work out-of-the-box on your specific problem and that you will be required to change a few lines of code to adapt them to your needs.

## Installation

### With pip

This repository is tested on Python 3.8+, Flax 0.4.1+, PyTorch 1.11+, and TensorFlow 2.6+.

You should install 🤗 Transformers in a [virtual environment](https://docs.python.org/3/library/venv.html).
If you're unfamiliar with Python virtual environments, check out the [user guide](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/).

First, create a virtual environment with the version of Python you're going to use and activate it.

Then, you will need to install at least one of Flax, PyTorch, or TensorFlow.
Please refer to the [TensorFlow installation page](https://www.tensorflow.org/install/), the [PyTorch installation page](https://pytorch.org/get-started/locally/#start-locally) and/or the [Flax](https://github.com/google/flax#quick-install) and [Jax](https://github.com/google/jax#installation) installation pages regarding the specific installation command for your platform.

When one of those backends has been installed, 🤗 Transformers can be installed using pip as follows:

```bash
pip install transformers
```

If you'd like to play with the examples or need the bleeding edge of the code and can't wait for a new release, you must [install the library from source](https://huggingface.co/docs/transformers/installation#installing-from-source).

### With conda

🤗 Transformers can be installed using conda as follows:

```bash
conda install conda-forge::transformers
```

> **_NOTE:_** Installing `transformers` from the `huggingface` channel is deprecated.

Follow the installation pages of Flax, PyTorch or TensorFlow to see how to install them with conda.

> **_NOTE:_** On Windows, you may be prompted to activate Developer Mode in order to benefit from caching. If this is not an option for you, please let us know in [this issue](https://github.com/huggingface/huggingface_hub/issues/1062).
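
Whichever package manager you used, a quick way to check what the installation actually picked up is to query the library itself. A minimal sketch (the version string and booleans below are illustrative and depend on your environment):

```python
>>> import transformers
>>> transformers.__version__
'4.39.0'

# These helpers report which deep learning backends Transformers can see
>>> from transformers.utils import is_flax_available, is_tf_available, is_torch_available
>>> is_torch_available(), is_tf_available(), is_flax_available()
(True, False, False)
```
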
rel=\"nofollow\"\u003eALBERT\u003c/a\u003e\u003c/strong\u003e (from Google Research and the Toyota Technological Institute at Chicago) released with the paper \u003ca href=\"https://arxiv.org/abs/1909.11942\" rel=\"nofollow\"\u003eALBERT: A Lite BERT for Self-supervised Learning of Language Representations\u003c/a\u003e, by Zhenzhong Lan, Mingda Chen, Sebastian Goodman, Kevin Gimpel, Piyush Sharma, Radu Soricut.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/align\" rel=\"nofollow\"\u003eALIGN\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2102.05918\" rel=\"nofollow\"\u003eScaling Up Visual and Vision-Language Representation Learning With Noisy Text Supervision\u003c/a\u003e by Chao Jia, Yinfei Yang, Ye Xia, Yi-Ting Chen, Zarana Parekh, Hieu Pham, Quoc V. Le, Yunhsuan Sung, Zhen Li, Tom Duerig.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/altclip\" rel=\"nofollow\"\u003eAltCLIP\u003c/a\u003e\u003c/strong\u003e (from BAAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2211.06679\" rel=\"nofollow\"\u003eAltCLIP: Altering the Language Encoder in CLIP for Extended Language Capabilities\u003c/a\u003e by Chen, Zhongzhi and Liu, Guang and Zhang, Bo-Wen and Ye, Fulong and Yang, Qinghong and Wu, Ledell.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/audio-spectrogram-transformer\" rel=\"nofollow\"\u003eAudio Spectrogram Transformer\u003c/a\u003e\u003c/strong\u003e (from MIT) released with the paper \u003ca href=\"https://arxiv.org/abs/2104.01778\" rel=\"nofollow\"\u003eAST: Audio Spectrogram Transformer\u003c/a\u003e by Yuan Gong, Yu-An Chung, James Glass.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/autoformer\" rel=\"nofollow\"\u003eAutoformer\u003c/a\u003e\u003c/strong\u003e (from Tsinghua University) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.13008\" rel=\"nofollow\"\u003eAutoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting\u003c/a\u003e by Haixu Wu, Jiehui Xu, Jianmin Wang, Mingsheng Long.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bark\" rel=\"nofollow\"\u003eBark\u003c/a\u003e\u003c/strong\u003e (from Suno) released in the repository \u003ca href=\"https://github.com/suno-ai/bark\"\u003esuno-ai/bark\u003c/a\u003e by Suno AI team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bart\" rel=\"nofollow\"\u003eBART\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/1910.13461\" rel=\"nofollow\"\u003eBART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension\u003c/a\u003e by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov, and Luke Zettlemoyer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/barthez\" rel=\"nofollow\"\u003eBARThez\u003c/a\u003e\u003c/strong\u003e (from École polytechnique) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.12321\" rel=\"nofollow\"\u003eBARThez: a Skilled Pretrained 
French Sequence-to-Sequence Model\u003c/a\u003e by Moussa Kamal Eddine, Antoine J.-P. Tixier, Michalis Vazirgiannis.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bartpho\" rel=\"nofollow\"\u003eBARTpho\u003c/a\u003e\u003c/strong\u003e (from VinAI Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2109.09701\" rel=\"nofollow\"\u003eBARTpho: Pre-trained Sequence-to-Sequence Models for Vietnamese\u003c/a\u003e by Nguyen Luong Tran, Duong Minh Le and Dat Quoc Nguyen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/beit\" rel=\"nofollow\"\u003eBEiT\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.08254\" rel=\"nofollow\"\u003eBEiT: BERT Pre-Training of Image Transformers\u003c/a\u003e by Hangbo Bao, Li Dong, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bert\" rel=\"nofollow\"\u003eBERT\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/1810.04805\" rel=\"nofollow\"\u003eBERT: Pre-training of Deep Bidirectional Transformers for Language Understanding\u003c/a\u003e by Jacob Devlin, Ming-Wei Chang, Kenton Lee, and Kristina Toutanova.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bert-generation\" rel=\"nofollow\"\u003eBERT For Sequence Generation\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/1907.12461\" rel=\"nofollow\"\u003eLeveraging Pre-trained Checkpoints for Sequence Generation Tasks\u003c/a\u003e by Sascha Rothe, Shashi Narayan, Aliaksei Severyn.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bertweet\" rel=\"nofollow\"\u003eBERTweet\u003c/a\u003e\u003c/strong\u003e (from VinAI Research) released with the paper \u003ca href=\"https://aclanthology.org/2020.emnlp-demos.2/\" rel=\"nofollow\"\u003eBERTweet: A pre-trained language model for English Tweets\u003c/a\u003e by Dat Quoc Nguyen, Thanh Vu and Anh Tuan Nguyen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bigbird_pegasus\" rel=\"nofollow\"\u003eBigBird-Pegasus\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2007.14062\" rel=\"nofollow\"\u003eBig Bird: Transformers for Longer Sequences\u003c/a\u003e by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/big_bird\" rel=\"nofollow\"\u003eBigBird-RoBERTa\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2007.14062\" rel=\"nofollow\"\u003eBig Bird: Transformers for Longer Sequences\u003c/a\u003e by Manzil Zaheer, Guru Guruganesh, Avinava Dubey, Joshua Ainslie, Chris Alberti, Santiago Ontanon, Philip Pham, Anirudh Ravula, Qifan Wang, Li Yang, Amr Ahmed.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/biogpt\" rel=\"nofollow\"\u003eBioGpt\u003c/a\u003e\u003c/strong\u003e 
(from Microsoft Research AI4Science) released with the paper \u003ca href=\"https://academic.oup.com/bib/advance-article/doi/10.1093/bib/bbac409/6713511?guestAccessKey=a66d9b5d-4f83-4017-bb52-405815c907b9\" rel=\"nofollow\"\u003eBioGPT: generative pre-trained transformer for biomedical text generation and mining\u003c/a\u003e by Renqian Luo, Liai Sun, Yingce Xia, Tao Qin, Sheng Zhang, Hoifung Poon and Tie-Yan Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bit\" rel=\"nofollow\"\u003eBiT\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/1912.11370\" rel=\"nofollow\"\u003eBig Transfer (BiT): General Visual Representation Learning\u003c/a\u003e by Alexander Kolesnikov, Lucas Beyer, Xiaohua Zhai, Joan Puigcerver, Jessica Yung, Sylvain Gelly, Neil Houlsby.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/blenderbot\" rel=\"nofollow\"\u003eBlenderbot\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.13637\" rel=\"nofollow\"\u003eRecipes for building an open-domain chatbot\u003c/a\u003e by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. Smith, Y-Lan Boureau, Jason Weston.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/blenderbot-small\" rel=\"nofollow\"\u003eBlenderbotSmall\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.13637\" rel=\"nofollow\"\u003eRecipes for building an open-domain chatbot\u003c/a\u003e by Stephen Roller, Emily Dinan, Naman Goyal, Da Ju, Mary Williamson, Yinhan Liu, Jing Xu, Myle Ott, Kurt Shuster, Eric M. 
Smith, Y-Lan Boureau, Jason Weston.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/blip\" rel=\"nofollow\"\u003eBLIP\u003c/a\u003e\u003c/strong\u003e (from Salesforce) released with the paper \u003ca href=\"https://arxiv.org/abs/2201.12086\" rel=\"nofollow\"\u003eBLIP: Bootstrapping Language-Image Pre-training for Unified Vision-Language Understanding and Generation\u003c/a\u003e by Junnan Li, Dongxu Li, Caiming Xiong, Steven Hoi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/blip-2\" rel=\"nofollow\"\u003eBLIP-2\u003c/a\u003e\u003c/strong\u003e (from Salesforce) released with the paper \u003ca href=\"https://arxiv.org/abs/2301.12597\" rel=\"nofollow\"\u003eBLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models\u003c/a\u003e by Junnan Li, Dongxu Li, Silvio Savarese, Steven Hoi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bloom\" rel=\"nofollow\"\u003eBLOOM\u003c/a\u003e\u003c/strong\u003e (from BigScience workshop) released by the \u003ca href=\"https://bigscience.huggingface.co/\" rel=\"nofollow\"\u003eBigScience Workshop\u003c/a\u003e.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bort\" rel=\"nofollow\"\u003eBORT\u003c/a\u003e\u003c/strong\u003e (from Alexa) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.10499\" rel=\"nofollow\"\u003eOptimal Subarchitecture Extraction For BERT\u003c/a\u003e by Adrian de Wynter and Daniel J. Perry.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bridgetower\" rel=\"nofollow\"\u003eBridgeTower\u003c/a\u003e\u003c/strong\u003e (from Harbin Institute of Technology/Microsoft Research Asia/Intel Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2206.08657\" rel=\"nofollow\"\u003eBridgeTower: Building Bridges Between Encoders in Vision-Language Representation Learning\u003c/a\u003e by Xiao Xu, Chenfei Wu, Shachar Rosenman, Vasudev Lal, Wanxiang Che, Nan Duan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/bros\" rel=\"nofollow\"\u003eBROS\u003c/a\u003e\u003c/strong\u003e (from NAVER CLOVA) released with the paper \u003ca href=\"https://arxiv.org/abs/2108.04539\" rel=\"nofollow\"\u003eBROS: A Pre-trained Language Model Focusing on Text and Layout for Better Key Information Extraction from Documents\u003c/a\u003e by Teakgyu Hong, Donghyun Kim, Mingi Ji, Wonseok Hwang, Daehyun Nam, Sungrae Park.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/byt5\" rel=\"nofollow\"\u003eByT5\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2105.13626\" rel=\"nofollow\"\u003eByT5: Towards a token-free future with pre-trained byte-to-byte models\u003c/a\u003e by Linting Xue, Aditya Barua, Noah Constant, Rami Al-Rfou, Sharan Narang, Mihir Kale, Adam Roberts, Colin Raffel.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/camembert\" rel=\"nofollow\"\u003eCamemBERT\u003c/a\u003e\u003c/strong\u003e (from Inria/Facebook/Sorbonne) released with the paper \u003ca href=\"https://arxiv.org/abs/1911.03894\" 
rel=\"nofollow\"\u003eCamemBERT: a Tasty French Language Model\u003c/a\u003e by Louis Martin*, Benjamin Muller*, Pedro Javier Ortiz Suárez*, Yoann Dupont, Laurent Romary, Éric Villemonte de la Clergerie, Djamé Seddah and Benoît Sagot.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/canine\" rel=\"nofollow\"\u003eCANINE\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.06874\" rel=\"nofollow\"\u003eCANINE: Pre-training an Efficient Tokenization-Free Encoder for Language Representation\u003c/a\u003e by Jonathan H. Clark, Dan Garrette, Iulia Turc, John Wieting.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/chinese_clip\" rel=\"nofollow\"\u003eChinese-CLIP\u003c/a\u003e\u003c/strong\u003e (from OFA-Sys) released with the paper \u003ca href=\"https://arxiv.org/abs/2211.01335\" rel=\"nofollow\"\u003eChinese CLIP: Contrastive Vision-Language Pretraining in Chinese\u003c/a\u003e by An Yang, Junshu Pan, Junyang Lin, Rui Men, Yichang Zhang, Jingren Zhou, Chang Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/clap\" rel=\"nofollow\"\u003eCLAP\u003c/a\u003e\u003c/strong\u003e (from LAION-AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2211.06687\" rel=\"nofollow\"\u003eLarge-scale Contrastive Language-Audio Pretraining with Feature Fusion and Keyword-to-Caption Augmentation\u003c/a\u003e by Yusong Wu, Ke Chen, Tianyu Zhang, Yuchen Hui, Taylor Berg-Kirkpatrick, Shlomo Dubnov.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/clip\" rel=\"nofollow\"\u003eCLIP\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.00020\" rel=\"nofollow\"\u003eLearning Transferable Visual Models From Natural Language Supervision\u003c/a\u003e by Alec Radford, Jong Wook Kim, Chris Hallacy, Aditya Ramesh, Gabriel Goh, Sandhini Agarwal, Girish Sastry, Amanda Askell, Pamela Mishkin, Jack Clark, Gretchen Krueger, Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/clipseg\" rel=\"nofollow\"\u003eCLIPSeg\u003c/a\u003e\u003c/strong\u003e (from University of Göttingen) released with the paper \u003ca href=\"https://arxiv.org/abs/2112.10003\" rel=\"nofollow\"\u003eImage Segmentation Using Text and Image Prompts\u003c/a\u003e by Timo Lüddecke and Alexander Ecker.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/clvp\" rel=\"nofollow\"\u003eCLVP\u003c/a\u003e\u003c/strong\u003e released with the paper \u003ca href=\"https://arxiv.org/abs/2305.07243\" rel=\"nofollow\"\u003eBetter speech synthesis through scaling\u003c/a\u003e by James Betker.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/codegen\" rel=\"nofollow\"\u003eCodeGen\u003c/a\u003e\u003c/strong\u003e (from Salesforce) released with the paper \u003ca href=\"https://arxiv.org/abs/2203.13474\" rel=\"nofollow\"\u003eA Conversational Paradigm for Program Synthesis\u003c/a\u003e by Erik Nijkamp, Bo Pang, Hiroaki Hayashi, Lifu Tu, Huan Wang, Yingbo Zhou, Silvio Savarese, Caiming Xiong.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca 
href=\"https://huggingface.co/docs/transformers/model_doc/llama_code\" rel=\"nofollow\"\u003eCodeLlama\u003c/a\u003e\u003c/strong\u003e (from MetaAI) released with the paper \u003ca href=\"https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/\" rel=\"nofollow\"\u003eCode Llama: Open Foundation Models for Code\u003c/a\u003e by Baptiste Rozière, Jonas Gehring, Fabian Gloeckle, Sten Sootla, Itai Gat, Xiaoqing Ellen Tan, Yossi Adi, Jingyu Liu, Tal Remez, Jérémy Rapin, Artyom Kozhevnikov, Ivan Evtimov, Joanna Bitton, Manish Bhatt, Cristian Canton Ferrer, Aaron Grattafiori, Wenhan Xiong, Alexandre Défossez, Jade Copet, Faisal Azhar, Hugo Touvron, Louis Martin, Nicolas Usunier, Thomas Scialom, Gabriel Synnaeve.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/cohere\" rel=\"nofollow\"\u003eCohere\u003c/a\u003e\u003c/strong\u003e (from Cohere) released with the paper \u003ca href=\"https://txt.cohere.com/command-r/\" rel=\"nofollow\"\u003eCommand-R: Retrieval Augmented Generation at Production Scale\u003c/a\u003e by Cohere.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/conditional_detr\" rel=\"nofollow\"\u003eConditional DETR\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2108.06152\" rel=\"nofollow\"\u003eConditional DETR for Fast Training Convergence\u003c/a\u003e by Depu Meng, Xiaokang Chen, Zejia Fan, Gang Zeng, Houqiang Li, Yuhui Yuan, Lei Sun, Jingdong Wang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/convbert\" rel=\"nofollow\"\u003eConvBERT\u003c/a\u003e\u003c/strong\u003e (from YituTech) released with the paper \u003ca href=\"https://arxiv.org/abs/2008.02496\" rel=\"nofollow\"\u003eConvBERT: Improving BERT with Span-based Dynamic Convolution\u003c/a\u003e by Zihang Jiang, Weihao Yu, Daquan Zhou, Yunpeng Chen, Jiashi Feng, Shuicheng Yan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/convnext\" rel=\"nofollow\"\u003eConvNeXT\u003c/a\u003e\u003c/strong\u003e (from Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2201.03545\" rel=\"nofollow\"\u003eA ConvNet for the 2020s\u003c/a\u003e by Zhuang Liu, Hanzi Mao, Chao-Yuan Wu, Christoph Feichtenhofer, Trevor Darrell, Saining Xie.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/convnextv2\" rel=\"nofollow\"\u003eConvNeXTV2\u003c/a\u003e\u003c/strong\u003e (from Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2301.00808\" rel=\"nofollow\"\u003eConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders\u003c/a\u003e by Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon, Saining Xie.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/cpm\" rel=\"nofollow\"\u003eCPM\u003c/a\u003e\u003c/strong\u003e (from Tsinghua University) released with the paper \u003ca href=\"https://arxiv.org/abs/2012.00413\" rel=\"nofollow\"\u003eCPM: A Large-scale Generative Chinese Pre-trained Language Model\u003c/a\u003e by Zhengyan Zhang, Xu Han, Hao Zhou, Pei Ke, Yuxian Gu, Deming Ye, Yujia Qin, Yusheng Su, Haozhe Ji, Jian Guan, Fanchao Qi, Xiaozhi Wang, Yanan Zheng, Guoyang 
Zeng, Huanqi Cao, Shengqi Chen, Daixuan Li, Zhenbo Sun, Zhiyuan Liu, Minlie Huang, Wentao Han, Jie Tang, Juanzi Li, Xiaoyan Zhu, Maosong Sun.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/cpmant\" rel=\"nofollow\"\u003eCPM-Ant\u003c/a\u003e\u003c/strong\u003e (from OpenBMB) released by the \u003ca href=\"https://www.openbmb.org/\" rel=\"nofollow\"\u003eOpenBMB\u003c/a\u003e.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/ctrl\" rel=\"nofollow\"\u003eCTRL\u003c/a\u003e\u003c/strong\u003e (from Salesforce) released with the paper \u003ca href=\"https://arxiv.org/abs/1909.05858\" rel=\"nofollow\"\u003eCTRL: A Conditional Transformer Language Model for Controllable Generation\u003c/a\u003e by Nitish Shirish Keskar*, Bryan McCann*, Lav R. Varshney, Caiming Xiong and Richard Socher.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/cvt\" rel=\"nofollow\"\u003eCvT\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.15808\" rel=\"nofollow\"\u003eCvT: Introducing Convolutions to Vision Transformers\u003c/a\u003e by Haiping Wu, Bin Xiao, Noel Codella, Mengchen Liu, Xiyang Dai, Lu Yuan, Lei Zhang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/data2vec\" rel=\"nofollow\"\u003eData2Vec\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2202.03555\" rel=\"nofollow\"\u003eData2Vec: A General Framework for Self-supervised Learning in Speech, Vision and Language\u003c/a\u003e by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deberta\" rel=\"nofollow\"\u003eDeBERTa\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2006.03654\" rel=\"nofollow\"\u003eDeBERTa: Decoding-enhanced BERT with Disentangled Attention\u003c/a\u003e by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deberta-v2\" rel=\"nofollow\"\u003eDeBERTa-v2\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2006.03654\" rel=\"nofollow\"\u003eDeBERTa: Decoding-enhanced BERT with Disentangled Attention\u003c/a\u003e by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/decision_transformer\" rel=\"nofollow\"\u003eDecision Transformer\u003c/a\u003e\u003c/strong\u003e (from Berkeley/Facebook/Google) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.01345\" rel=\"nofollow\"\u003eDecision Transformer: Reinforcement Learning via Sequence Modeling\u003c/a\u003e by Lili Chen, Kevin Lu, Aravind Rajeswaran, Kimin Lee, Aditya Grover, Michael Laskin, Pieter Abbeel, Aravind Srinivas, Igor Mordatch.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deformable_detr\" rel=\"nofollow\"\u003eDeformable DETR\u003c/a\u003e\u003c/strong\u003e (from SenseTime Research) released with the paper \u003ca 
href=\"https://arxiv.org/abs/2010.04159\" rel=\"nofollow\"\u003eDeformable DETR: Deformable Transformers for End-to-End Object Detection\u003c/a\u003e by Xizhou Zhu, Weijie Su, Lewei Lu, Bin Li, Xiaogang Wang, Jifeng Dai.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deit\" rel=\"nofollow\"\u003eDeiT\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2012.12877\" rel=\"nofollow\"\u003eTraining data-efficient image transformers \u0026amp; distillation through attention\u003c/a\u003e by Hugo Touvron, Matthieu Cord, Matthijs Douze, Francisco Massa, Alexandre Sablayrolles, Hervé Jégou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deplot\" rel=\"nofollow\"\u003eDePlot\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2212.10505\" rel=\"nofollow\"\u003eDePlot: One-shot visual language reasoning by plot-to-table translation\u003c/a\u003e by Fangyu Liu, Julian Martin Eisenschlos, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Wenhu Chen, Nigel Collier, Yasemin Altun.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/depth_anything\" rel=\"nofollow\"\u003eDepth Anything\u003c/a\u003e\u003c/strong\u003e (from University of Hong Kong and TikTok) released with the paper \u003ca href=\"https://arxiv.org/abs/2401.10891\" rel=\"nofollow\"\u003eDepth Anything: Unleashing the Power of Large-Scale Unlabeled Data\u003c/a\u003e by Lihe Yang, Bingyi Kang, Zilong Huang, Xiaogang Xu, Jiashi Feng, Hengshuang Zhao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/deta\" rel=\"nofollow\"\u003eDETA\u003c/a\u003e\u003c/strong\u003e (from The University of Texas at Austin) released with the paper \u003ca href=\"https://arxiv.org/abs/2212.06137\" rel=\"nofollow\"\u003eNMS Strikes Back\u003c/a\u003e by Jeffrey Ouyang-Zhang, Jang Hyun Cho, Xingyi Zhou, Philipp Krähenbühl.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/detr\" rel=\"nofollow\"\u003eDETR\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2005.12872\" rel=\"nofollow\"\u003eEnd-to-End Object Detection with Transformers\u003c/a\u003e by Nicolas Carion, Francisco Massa, Gabriel Synnaeve, Nicolas Usunier, Alexander Kirillov, Sergey Zagoruyko.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/dialogpt\" rel=\"nofollow\"\u003eDialoGPT\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/1911.00536\" rel=\"nofollow\"\u003eDialoGPT: Large-Scale Generative Pre-training for Conversational Response Generation\u003c/a\u003e by Yizhe Zhang, Siqi Sun, Michel Galley, Yen-Chun Chen, Chris Brockett, Xiang Gao, Jianfeng Gao, Jingjing Liu, Bill Dolan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/dinat\" rel=\"nofollow\"\u003eDiNAT\u003c/a\u003e\u003c/strong\u003e (from SHI Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2209.15001\" rel=\"nofollow\"\u003eDilated Neighborhood Attention Transformer\u003c/a\u003e by Ali Hassani and 
Humphrey Shi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/dinov2\" rel=\"nofollow\"\u003eDINOv2\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2304.07193\" rel=\"nofollow\"\u003eDINOv2: Learning Robust Visual Features without Supervision\u003c/a\u003e by Maxime Oquab, Timothée Darcet, Théo Moutakanni, Huy Vo, Marc Szafraniec, Vasil Khalidov, Pierre Fernandez, Daniel Haziza, Francisco Massa, Alaaeldin El-Nouby, Mahmoud Assran, Nicolas Ballas, Wojciech Galuba, Russell Howes, Po-Yao Huang, Shang-Wen Li, Ishan Misra, Michael Rabbat, Vasu Sharma, Gabriel Synnaeve, Hu Xu, Hervé Jegou, Julien Mairal, Patrick Labatut, Armand Joulin, Piotr Bojanowski.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/distilbert\" rel=\"nofollow\"\u003eDistilBERT\u003c/a\u003e\u003c/strong\u003e (from HuggingFace), released together with the paper \u003ca href=\"https://arxiv.org/abs/1910.01108\" rel=\"nofollow\"\u003eDistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter\u003c/a\u003e by Victor Sanh, Lysandre Debut and Thomas Wolf. The same method has been applied to compress GPT2 into \u003ca href=\"https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation\"\u003eDistilGPT2\u003c/a\u003e, RoBERTa into \u003ca href=\"https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation\"\u003eDistilRoBERTa\u003c/a\u003e, Multilingual BERT into \u003ca href=\"https://github.com/huggingface/transformers/tree/main/examples/research_projects/distillation\"\u003eDistilmBERT\u003c/a\u003e and a German version of DistilBERT.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/dit\" rel=\"nofollow\"\u003eDiT\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2203.02378\" rel=\"nofollow\"\u003eDiT: Self-supervised Pre-training for Document Image Transformer\u003c/a\u003e by Junlong Li, Yiheng Xu, Tengchao Lv, Lei Cui, Cha Zhang, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/donut\" rel=\"nofollow\"\u003eDonut\u003c/a\u003e\u003c/strong\u003e (from NAVER), released together with the paper \u003ca href=\"https://arxiv.org/abs/2111.15664\" rel=\"nofollow\"\u003eOCR-free Document Understanding Transformer\u003c/a\u003e by Geewook Kim, Teakgyu Hong, Moonbin Yim, Jeongyeon Nam, Jinyoung Park, Jinyeong Yim, Wonseok Hwang, Sangdoo Yun, Dongyoon Han, Seunghyun Park.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/dpr\" rel=\"nofollow\"\u003eDPR\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.04906\" rel=\"nofollow\"\u003eDense Passage Retrieval for Open-Domain Question Answering\u003c/a\u003e by Vladimir Karpukhin, Barlas Oğuz, Sewon Min, Patrick Lewis, Ledell Wu, Sergey Edunov, Danqi Chen, and Wen-tau Yih.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/master/model_doc/dpt\" rel=\"nofollow\"\u003eDPT\u003c/a\u003e\u003c/strong\u003e (from Intel Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.13413\" rel=\"nofollow\"\u003eVision Transformers 
for Dense Prediction\u003c/a\u003e by René Ranftl, Alexey Bochkovskiy, Vladlen Koltun.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/efficientformer\" rel=\"nofollow\"\u003eEfficientFormer\u003c/a\u003e\u003c/strong\u003e (from Snap Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2206.01191\" rel=\"nofollow\"\u003eEfficientFormer: Vision Transformers at MobileNetSpeed\u003c/a\u003e by Yanyu Li, Geng Yuan, Yang Wen, Ju Hu, Georgios Evangelidis, Sergey Tulyakov, Yanzhi Wang, Jian Ren.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/efficientnet\" rel=\"nofollow\"\u003eEfficientNet\u003c/a\u003e\u003c/strong\u003e (from Google Brain) released with the paper \u003ca href=\"https://arxiv.org/abs/1905.11946\" rel=\"nofollow\"\u003eEfficientNet: Rethinking Model Scaling for Convolutional Neural Networks\u003c/a\u003e by Mingxing Tan, Quoc V. Le.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/electra\" rel=\"nofollow\"\u003eELECTRA\u003c/a\u003e\u003c/strong\u003e (from Google Research/Stanford University) released with the paper \u003ca href=\"https://arxiv.org/abs/2003.10555\" rel=\"nofollow\"\u003eELECTRA: Pre-training text encoders as discriminators rather than generators\u003c/a\u003e by Kevin Clark, Minh-Thang Luong, Quoc V. Le, Christopher D. Manning.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/encodec\" rel=\"nofollow\"\u003eEnCodec\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2210.13438\" rel=\"nofollow\"\u003eHigh Fidelity Neural Audio Compression\u003c/a\u003e by Alexandre Défossez, Jade Copet, Gabriel Synnaeve, Yossi Adi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/encoder-decoder\" rel=\"nofollow\"\u003eEncoderDecoder\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/1907.12461\" rel=\"nofollow\"\u003eLeveraging Pre-trained Checkpoints for Sequence Generation Tasks\u003c/a\u003e by Sascha Rothe, Shashi Narayan, Aliaksei Severyn.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/ernie\" rel=\"nofollow\"\u003eERNIE\u003c/a\u003e\u003c/strong\u003e (from Baidu) released with the paper \u003ca href=\"https://arxiv.org/abs/1904.09223\" rel=\"nofollow\"\u003eERNIE: Enhanced Representation through Knowledge Integration\u003c/a\u003e by Yu Sun, Shuohuan Wang, Yukun Li, Shikun Feng, Xuyi Chen, Han Zhang, Xin Tian, Danxiang Zhu, Hao Tian, Hua Wu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/ernie_m\" rel=\"nofollow\"\u003eErnieM\u003c/a\u003e\u003c/strong\u003e (from Baidu) released with the paper \u003ca href=\"https://arxiv.org/abs/2012.15674\" rel=\"nofollow\"\u003eERNIE-M: Enhanced Multilingual Representation by Aligning Cross-lingual Semantics with Monolingual Corpora\u003c/a\u003e by Xuan Ouyang, Shuohuan Wang, Chao Pang, Yu Sun, Hao Tian, Hua Wu, Haifeng Wang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/esm\" rel=\"nofollow\"\u003eESM\u003c/a\u003e\u003c/strong\u003e (from Meta AI) are transformer 
protein language models. \u003cstrong\u003eESM-1b\u003c/strong\u003e was released with the paper \u003ca href=\"https://www.pnas.org/content/118/15/e2016239118\" rel=\"nofollow\"\u003eBiological structure and function emerge from scaling unsupervised learning to 250 million protein sequences\u003c/a\u003e by Alexander Rives, Joshua Meier, Tom Sercu, Siddharth Goyal, Zeming Lin, Jason Liu, Demi Guo, Myle Ott, C. Lawrence Zitnick, Jerry Ma, and Rob Fergus. \u003cstrong\u003eESM-1v\u003c/strong\u003e was released with the paper \u003ca href=\"https://doi.org/10.1101/2021.07.09.450648\" rel=\"nofollow\"\u003eLanguage models enable zero-shot prediction of the effects of mutations on protein function\u003c/a\u003e by Joshua Meier, Roshan Rao, Robert Verkuil, Jason Liu, Tom Sercu and Alexander Rives. \u003cstrong\u003eESM-2 and ESMFold\u003c/strong\u003e were released with the paper \u003ca href=\"https://doi.org/10.1101/2022.07.20.500902\" rel=\"nofollow\"\u003eLanguage models of protein sequences at the scale of evolution enable accurate structure prediction\u003c/a\u003e by Zeming Lin, Halil Akin, Roshan Rao, Brian Hie, Zhongkai Zhu, Wenting Lu, Allan dos Santos Costa, Maryam Fazel-Zarandi, Tom Sercu, Sal Candido, Alexander Rives.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/falcon\" rel=\"nofollow\"\u003eFalcon\u003c/a\u003e\u003c/strong\u003e (from Technology Innovation Institute) by Almazrouei, Ebtesam and Alobeidli, Hamza and Alshamsi, Abdulaziz and Cappelli, Alessandro and Cojocaru, Ruxandra and Debbah, Merouane and Goffinet, Etienne and Heslow, Daniel and Launay, Julien and Malartic, Quentin and Noune, Badreddine and Pannier, Baptiste and Penedo, Guilherme.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/fastspeech2_conformer\" rel=\"nofollow\"\u003eFastSpeech2Conformer\u003c/a\u003e\u003c/strong\u003e (from ESPnet) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.13956\" rel=\"nofollow\"\u003eRecent Developments On Espnet Toolkit Boosted By Conformer\u003c/a\u003e by Pengcheng Guo, Florian Boyer, Xuankai Chang, Tomoki Hayashi, Yosuke Higuchi, Hirofumi Inaguma, Naoyuki Kamo, Chenda Li, Daniel Garcia-Romero, Jiatong Shi, Jing Shi, Shinji Watanabe, Kun Wei, Wangyou Zhang, and Yuekai Zhang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/flan-t5\" rel=\"nofollow\"\u003eFLAN-T5\u003c/a\u003e\u003c/strong\u003e (from Google AI) released in the repository \u003ca href=\"https://github.com/google-research/t5x/blob/main/docs/models.md#flan-t5-checkpoints\"\u003egoogle-research/t5x\u003c/a\u003e by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. 
Le, and Jason Wei\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/flan-ul2\" rel=\"nofollow\"\u003eFLAN-UL2\u003c/a\u003e\u003c/strong\u003e (from Google AI) released in the repository \u003ca href=\"https://github.com/google-research/t5x/blob/main/docs/models.md#flan-ul2-checkpoints\"\u003egoogle-research/t5x\u003c/a\u003e by Hyung Won Chung, Le Hou, Shayne Longpre, Barret Zoph, Yi Tay, William Fedus, Eric Li, Xuezhi Wang, Mostafa Dehghani, Siddhartha Brahma, Albert Webson, Shixiang Shane Gu, Zhuyun Dai, Mirac Suzgun, Xinyun Chen, Aakanksha Chowdhery, Sharan Narang, Gaurav Mishra, Adams Yu, Vincent Zhao, Yanping Huang, Andrew Dai, Hongkun Yu, Slav Petrov, Ed H. Chi, Jeff Dean, Jacob Devlin, Adam Roberts, Denny Zhou, Quoc V. Le, and Jason Wei\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/flaubert\" rel=\"nofollow\"\u003eFlauBERT\u003c/a\u003e\u003c/strong\u003e (from CNRS) released with the paper \u003ca href=\"https://arxiv.org/abs/1912.05372\" rel=\"nofollow\"\u003eFlauBERT: Unsupervised Language Model Pre-training for French\u003c/a\u003e by Hang Le, Loïc Vial, Jibril Frej, Vincent Segonne, Maximin Coavoux, Benjamin Lecouteux, Alexandre Allauzen, Benoît Crabbé, Laurent Besacier, Didier Schwab.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/flava\" rel=\"nofollow\"\u003eFLAVA\u003c/a\u003e\u003c/strong\u003e (from Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2112.04482\" rel=\"nofollow\"\u003eFLAVA: A Foundational Language And Vision Alignment Model\u003c/a\u003e by Amanpreet Singh, Ronghang Hu, Vedanuj Goswami, Guillaume Couairon, Wojciech Galuba, Marcus Rohrbach, and Douwe Kiela.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/fnet\" rel=\"nofollow\"\u003eFNet\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2105.03824\" rel=\"nofollow\"\u003eFNet: Mixing Tokens with Fourier Transforms\u003c/a\u003e by James Lee-Thorp, Joshua Ainslie, Ilya Eckstein, Santiago Ontanon.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/focalnet\" rel=\"nofollow\"\u003eFocalNet\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2203.11926\" rel=\"nofollow\"\u003eFocal Modulation Networks\u003c/a\u003e by Jianwei Yang, Chunyuan Li, Xiyang Dai, Lu Yuan, Jianfeng Gao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/funnel\" rel=\"nofollow\"\u003eFunnel Transformer\u003c/a\u003e\u003c/strong\u003e (from CMU/Google Brain) released with the paper \u003ca href=\"https://arxiv.org/abs/2006.03236\" rel=\"nofollow\"\u003eFunnel-Transformer: Filtering out Sequential Redundancy for Efficient Language Processing\u003c/a\u003e by Zihang Dai, Guokun Lai, Yiming Yang, Quoc V. Le.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/fuyu\" rel=\"nofollow\"\u003eFuyu\u003c/a\u003e\u003c/strong\u003e (from ADEPT) Rohan Bavishi, Erich Elsen, Curtis Hawthorne, Maxwell Nye, Augustus Odena, Arushi Somani, Sağnak Taşırlar. 
Released with the paper \u003ca href=\"https://www.adept.ai/blog/fuyu-8b\" rel=\"nofollow\"\u003eblog post\u003c/a\u003e\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gemma\" rel=\"nofollow\"\u003eGemma\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://blog.google/technology/developers/gemma-open-models/\" rel=\"nofollow\"\u003eGemma: Open Models Based on Gemini Technology and Research\u003c/a\u003e by the Gemma Google team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/git\" rel=\"nofollow\"\u003eGIT\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2205.14100\" rel=\"nofollow\"\u003eGIT: A Generative Image-to-text Transformer for Vision and Language\u003c/a\u003e by Jianfeng Wang, Zhengyuan Yang, Xiaowei Hu, Linjie Li, Kevin Lin, Zhe Gan, Zicheng Liu, Ce Liu, Lijuan Wang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/glpn\" rel=\"nofollow\"\u003eGLPN\u003c/a\u003e\u003c/strong\u003e (from KAIST) released with the paper \u003ca href=\"https://arxiv.org/abs/2201.07436\" rel=\"nofollow\"\u003eGlobal-Local Path Networks for Monocular Depth Estimation with Vertical CutDepth\u003c/a\u003e by Doyeon Kim, Woonghyun Ga, Pyungwhan Ahn, Donggyu Joo, Sehwan Chun, Junmo Kim.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/openai-gpt\" rel=\"nofollow\"\u003eGPT\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca href=\"https://openai.com/research/language-unsupervised/\" rel=\"nofollow\"\u003eImproving Language Understanding by Generative Pre-Training\u003c/a\u003e by Alec Radford, Karthik Narasimhan, Tim Salimans and Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt_neo\" rel=\"nofollow\"\u003eGPT Neo\u003c/a\u003e\u003c/strong\u003e (from EleutherAI) released in the repository \u003ca href=\"https://github.com/EleutherAI/gpt-neo\"\u003eEleutherAI/gpt-neo\u003c/a\u003e by Sid Black, Stella Biderman, Leo Gao, Phil Wang and Connor Leahy.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt_neox\" rel=\"nofollow\"\u003eGPT NeoX\u003c/a\u003e\u003c/strong\u003e (from EleutherAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2204.06745\" rel=\"nofollow\"\u003eGPT-NeoX-20B: An Open-Source Autoregressive Language Model\u003c/a\u003e by Sid Black, Stella Biderman, Eric Hallahan, Quentin Anthony, Leo Gao, Laurence Golding, Horace He, Connor Leahy, Kyle McDonell, Jason Phang, Michael Pieler, USVSN Sai Prashanth, Shivanshu Purohit, Laria Reynolds, Jonathan Tow, Ben Wang, Samuel Weinbach\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt_neox_japanese\" rel=\"nofollow\"\u003eGPT NeoX Japanese\u003c/a\u003e\u003c/strong\u003e (from ABEJA) released by Shinya Otani, Takayoshi Makabe, Anuj Arora, and Kyo Hattori.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt2\" rel=\"nofollow\"\u003eGPT-2\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca 
href=\"https://openai.com/research/better-language-models/\" rel=\"nofollow\"\u003eLanguage Models are Unsupervised Multitask Learners\u003c/a\u003e by Alec Radford, Jeffrey Wu, Rewon Child, David Luan, Dario Amodei and Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gptj\" rel=\"nofollow\"\u003eGPT-J\u003c/a\u003e\u003c/strong\u003e (from EleutherAI) released in the repository \u003ca href=\"https://github.com/kingoflolz/mesh-transformer-jax/\"\u003ekingoflolz/mesh-transformer-jax\u003c/a\u003e by Ben Wang and Aran Komatsuzaki.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt-sw3\" rel=\"nofollow\"\u003eGPT-Sw3\u003c/a\u003e\u003c/strong\u003e (from AI-Sweden) released with the paper \u003ca href=\"http://www.lrec-conf.org/proceedings/lrec2022/pdf/2022.lrec-1.376.pdf\" rel=\"nofollow\"\u003eLessons Learned from GPT-SW3: Building the First Large-Scale Generative Language Model for Swedish\u003c/a\u003e by Ariel Ekgren, Amaru Cuba Gyllensten, Evangelia Gogoulou, Alice Heiman, Severine Verlinden, Joey Öhman, Fredrik Carlsson, Magnus Sahlgren.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gpt_bigcode\" rel=\"nofollow\"\u003eGPTBigCode\u003c/a\u003e\u003c/strong\u003e (from BigCode) released with the paper \u003ca href=\"https://arxiv.org/abs/2301.03988\" rel=\"nofollow\"\u003eSantaCoder: don't reach for the stars!\u003c/a\u003e by Loubna Ben Allal, Raymond Li, Denis Kocetkov, Chenghao Mou, Christopher Akiki, Carlos Munoz Ferrandis, Niklas Muennighoff, Mayank Mishra, Alex Gu, Manan Dey, Logesh Kumar Umapathi, Carolyn Jane Anderson, Yangtian Zi, Joel Lamy Poirier, Hailey Schoelkopf, Sergey Troshin, Dmitry Abulkhanov, Manuel Romero, Michael Lappert, Francesco De Toni, Bernardo García del Río, Qian Liu, Shamik Bose, Urvashi Bhattacharyya, Terry Yue Zhuo, Ian Yu, Paulo Villegas, Marco Zocca, Sourab Mangrulkar, David Lansky, Huu Nguyen, Danish Contractor, Luis Villa, Jia Li, Dzmitry Bahdanau, Yacine Jernite, Sean Hughes, Daniel Fried, Arjun Guha, Harm de Vries, Leandro von Werra.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/gptsan-japanese\" rel=\"nofollow\"\u003eGPTSAN-japanese\u003c/a\u003e\u003c/strong\u003e released in the repository \u003ca href=\"https://github.com/tanreinama/GPTSAN/blob/main/report/model.md\"\u003etanreinama/GPTSAN\u003c/a\u003e by Toshiyuki Sakamoto(tanreinama).\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/graphormer\" rel=\"nofollow\"\u003eGraphormer\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.05234\" rel=\"nofollow\"\u003eDo Transformers Really Perform Bad for Graph Representation?\u003c/a\u003e by Chengxuan Ying, Tianle Cai, Shengjie Luo, Shuxin Zheng, Guolin Ke, Di He, Yanming Shen, Tie-Yan Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/groupvit\" rel=\"nofollow\"\u003eGroupViT\u003c/a\u003e\u003c/strong\u003e (from UCSD, NVIDIA) released with the paper \u003ca href=\"https://arxiv.org/abs/2202.11094\" rel=\"nofollow\"\u003eGroupViT: Semantic Segmentation Emerges from Text Supervision\u003c/a\u003e by Jiarui Xu, Shalini De Mello, Sifei Liu, Wonmin Byeon, Thomas Breuel, Jan 
Kautz, Xiaolong Wang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/herbert\" rel=\"nofollow\"\u003eHerBERT\u003c/a\u003e\u003c/strong\u003e (from Allegro.pl, AGH University of Science and Technology) released with the paper \u003ca href=\"https://www.aclweb.org/anthology/2020.acl-main.111.pdf\" rel=\"nofollow\"\u003eKLEJ: Comprehensive Benchmark for Polish Language Understanding\u003c/a\u003e by Piotr Rybak, Robert Mroczkowski, Janusz Tracz, Ireneusz Gawlik.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/hubert\" rel=\"nofollow\"\u003eHubert\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.07447\" rel=\"nofollow\"\u003eHuBERT: Self-Supervised Speech Representation Learning by Masked Prediction of Hidden Units\u003c/a\u003e by Wei-Ning Hsu, Benjamin Bolte, Yao-Hung Hubert Tsai, Kushal Lakhotia, Ruslan Salakhutdinov, Abdelrahman Mohamed.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/ibert\" rel=\"nofollow\"\u003eI-BERT\u003c/a\u003e\u003c/strong\u003e (from Berkeley) released with the paper \u003ca href=\"https://arxiv.org/abs/2101.01321\" rel=\"nofollow\"\u003eI-BERT: Integer-only BERT Quantization\u003c/a\u003e by Sehoon Kim, Amir Gholami, Zhewei Yao, Michael W. Mahoney, Kurt Keutzer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/idefics\" rel=\"nofollow\"\u003eIDEFICS\u003c/a\u003e\u003c/strong\u003e (from HuggingFace) released with the paper \u003ca href=\"https://huggingface.co/papers/2306.16527\" rel=\"nofollow\"\u003eOBELICS: An Open Web-Scale Filtered Dataset of Interleaved Image-Text Documents\u003c/a\u003e by Hugo Laurençon, Lucile Saulnier, Léo Tronchon, Stas Bekman, Amanpreet Singh, Anton Lozhkov, Thomas Wang, Siddharth Karamcheti, Alexander M. 
Rush, Douwe Kiela, Matthieu Cord, Victor Sanh.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/imagegpt\" rel=\"nofollow\"\u003eImageGPT\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca href=\"https://openai.com/blog/image-gpt/\" rel=\"nofollow\"\u003eGenerative Pretraining from Pixels\u003c/a\u003e by Mark Chen, Alec Radford, Rewon Child, Jeffrey Wu, Heewoo Jun, David Luan, Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/informer\" rel=\"nofollow\"\u003eInformer\u003c/a\u003e\u003c/strong\u003e (from Beihang University, UC Berkeley, Rutgers University, SEDD Company) released with the paper \u003ca href=\"https://arxiv.org/abs/2012.07436\" rel=\"nofollow\"\u003eInformer: Beyond Efficient Transformer for Long Sequence Time-Series Forecasting\u003c/a\u003e by Haoyi Zhou, Shanghang Zhang, Jieqi Peng, Shuai Zhang, Jianxin Li, Hui Xiong, and Wancai Zhang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/instructblip\" rel=\"nofollow\"\u003eInstructBLIP\u003c/a\u003e\u003c/strong\u003e (from Salesforce) released with the paper \u003ca href=\"https://arxiv.org/abs/2305.06500\" rel=\"nofollow\"\u003eInstructBLIP: Towards General-purpose Vision-Language Models with Instruction Tuning\u003c/a\u003e by Wenliang Dai, Junnan Li, Dongxu Li, Anthony Meng Huat Tiong, Junqi Zhao, Weisheng Wang, Boyang Li, Pascale Fung, Steven Hoi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/jukebox\" rel=\"nofollow\"\u003eJukebox\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca href=\"https://arxiv.org/pdf/2005.00341.pdf\" rel=\"nofollow\"\u003eJukebox: A Generative Model for Music\u003c/a\u003e by Prafulla Dhariwal, Heewoo Jun, Christine Payne, Jong Wook Kim, Alec Radford, Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/kosmos-2\" rel=\"nofollow\"\u003eKOSMOS-2\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2306.14824\" rel=\"nofollow\"\u003eKosmos-2: Grounding Multimodal Large Language Models to the World\u003c/a\u003e by Zhiliang Peng, Wenhui Wang, Li Dong, Yaru Hao, Shaohan Huang, Shuming Ma, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/layoutlm\" rel=\"nofollow\"\u003eLayoutLM\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/1912.13318\" rel=\"nofollow\"\u003eLayoutLM: Pre-training of Text and Layout for Document Image Understanding\u003c/a\u003e by Yiheng Xu, Minghao Li, Lei Cui, Shaohan Huang, Furu Wei, Ming Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/layoutlmv2\" rel=\"nofollow\"\u003eLayoutLMv2\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2012.14740\" rel=\"nofollow\"\u003eLayoutLMv2: Multi-modal Pre-training for Visually-Rich Document Understanding\u003c/a\u003e by Yang Xu, Yiheng Xu, Tengchao Lv, Lei Cui, Furu Wei, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Wanxiang Che, Min Zhang, Lidong 
Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/layoutlmv3\" rel=\"nofollow\"\u003eLayoutLMv3\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2204.08387\" rel=\"nofollow\"\u003eLayoutLMv3: Pre-training for Document AI with Unified Text and Image Masking\u003c/a\u003e by Yupan Huang, Tengchao Lv, Lei Cui, Yutong Lu, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/layoutxlm\" rel=\"nofollow\"\u003eLayoutXLM\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2104.08836\" rel=\"nofollow\"\u003eLayoutXLM: Multimodal Pre-training for Multilingual Visually-rich Document Understanding\u003c/a\u003e by Yiheng Xu, Tengchao Lv, Lei Cui, Guoxin Wang, Yijuan Lu, Dinei Florencio, Cha Zhang, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/led\" rel=\"nofollow\"\u003eLED\u003c/a\u003e\u003c/strong\u003e (from AllenAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.05150\" rel=\"nofollow\"\u003eLongformer: The Long-Document Transformer\u003c/a\u003e by Iz Beltagy, Matthew E. Peters, Arman Cohan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/levit\" rel=\"nofollow\"\u003eLeViT\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2104.01136\" rel=\"nofollow\"\u003eLeViT: A Vision Transformer in ConvNet's Clothing for Faster Inference\u003c/a\u003e by Ben Graham, Alaaeldin El-Nouby, Hugo Touvron, Pierre Stock, Armand Joulin, Hervé Jégou, Matthijs Douze.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/lilt\" rel=\"nofollow\"\u003eLiLT\u003c/a\u003e\u003c/strong\u003e (from South China University of Technology) released with the paper \u003ca href=\"https://arxiv.org/abs/2202.13669\" rel=\"nofollow\"\u003eLiLT: A Simple yet Effective Language-Independent Layout Transformer for Structured Document Understanding\u003c/a\u003e by Jiapeng Wang, Lianwen Jin, Kai Ding.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/llama\" rel=\"nofollow\"\u003eLLaMA\u003c/a\u003e\u003c/strong\u003e (from The FAIR team of Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2302.13971\" rel=\"nofollow\"\u003eLLaMA: Open and Efficient Foundation Language Models\u003c/a\u003e by Hugo Touvron, Thibaut Lavril, Gautier Izacard, Xavier Martinet, Marie-Anne Lachaux, Timothée Lacroix, Baptiste Rozière, Naman Goyal, Eric Hambro, Faisal Azhar, Aurelien Rodriguez, Armand Joulin, Edouard Grave, Guillaume Lample.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/llama2\" rel=\"nofollow\"\u003eLlama2\u003c/a\u003e\u003c/strong\u003e (from The FAIR team of Meta AI) released with the paper \u003ca href=\"https://ai.meta.com/research/publications/llama-2-open-foundation-and-fine-tuned-chat-models/\" rel=\"nofollow\"\u003eLlama2: Open Foundation and Fine-Tuned Chat Models\u003c/a\u003e by Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti 
Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushka rMishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing EllenTan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu, Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/llava\" rel=\"nofollow\"\u003eLLaVa\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research \u0026amp; University of Wisconsin-Madison) released with the paper \u003ca href=\"https://arxiv.org/abs/2304.08485\" rel=\"nofollow\"\u003eVisual Instruction Tuning\u003c/a\u003e by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/main/model_doc/llava_next\" rel=\"nofollow\"\u003eLLaVA-NeXT\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research \u0026amp; University of Wisconsin-Madison) released with the paper \u003ca href=\"https://arxiv.org/abs/2310.03744\" rel=\"nofollow\"\u003eImproved Baselines with Visual Instruction Tuning\u003c/a\u003e by Haotian Liu, Chunyuan Li, Yuheng Li and Yong Jae Lee.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/longformer\" rel=\"nofollow\"\u003eLongformer\u003c/a\u003e\u003c/strong\u003e (from AllenAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.05150\" rel=\"nofollow\"\u003eLongformer: The Long-Document Transformer\u003c/a\u003e by Iz Beltagy, Matthew E. 
Peters, Arman Cohan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/longt5\" rel=\"nofollow\"\u003eLongT5\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2112.07916\" rel=\"nofollow\"\u003eLongT5: Efficient Text-To-Text Transformer for Long Sequences\u003c/a\u003e by Mandy Guo, Joshua Ainslie, David Uthus, Santiago Ontanon, Jianmo Ni, Yun-Hsuan Sung, Yinfei Yang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/luke\" rel=\"nofollow\"\u003eLUKE\u003c/a\u003e\u003c/strong\u003e (from Studio Ousia) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.01057\" rel=\"nofollow\"\u003eLUKE: Deep Contextualized Entity Representations with Entity-aware Self-attention\u003c/a\u003e by Ikuya Yamada, Akari Asai, Hiroyuki Shindo, Hideaki Takeda, Yuji Matsumoto.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/lxmert\" rel=\"nofollow\"\u003eLXMERT\u003c/a\u003e\u003c/strong\u003e (from UNC Chapel Hill) released with the paper \u003ca href=\"https://arxiv.org/abs/1908.07490\" rel=\"nofollow\"\u003eLXMERT: Learning Cross-Modality Encoder Representations from Transformers for Open-Domain Question Answering\u003c/a\u003e by Hao Tan and Mohit Bansal.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mctct\" rel=\"nofollow\"\u003eM-CTC-T\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2111.00161\" rel=\"nofollow\"\u003ePseudo-Labeling For Massively Multilingual Speech Recognition\u003c/a\u003e by Loren Lugosch, Tatiana Likhomanenko, Gabriel Synnaeve, and Ronan Collobert.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/m2m_100\" rel=\"nofollow\"\u003eM2M100\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.11125\" rel=\"nofollow\"\u003eBeyond English-Centric Multilingual Machine Translation\u003c/a\u003e by Angela Fan, Shruti Bhosale, Holger Schwenk, Zhiyi Ma, Ahmed El-Kishky, Siddharth Goyal, Mandeep Baines, Onur Celebi, Guillaume Wenzek, Vishrav Chaudhary, Naman Goyal, Tom Birch, Vitaliy Liptchinsky, Sergey Edunov, Edouard Grave, Michael Auli, Armand Joulin.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/madlad-400\" rel=\"nofollow\"\u003eMADLAD-400\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/2309.04662\" rel=\"nofollow\"\u003eMADLAD-400: A Multilingual And Document-Level Large Audited Dataset\u003c/a\u003e by Sneha Kudugunta, Isaac Caswell, Biao Zhang, Xavier Garcia, Christopher A. 
Choquette-Choo, Katherine Lee, Derrick Xin, Aditya Kusupati, Romi Stella, Ankur Bapna, Orhan Firat.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mamba\" rel=\"nofollow\"\u003eMamba\u003c/a\u003e\u003c/strong\u003e (from Albert Gu and Tri Dao) released with the paper \u003ca href=\"https://arxiv.org/abs/2312.00752\" rel=\"nofollow\"\u003eMamba: Linear-Time Sequence Modeling with Selective State Spaces\u003c/a\u003e by Albert Gu and Tri Dao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/marian\" rel=\"nofollow\"\u003eMarianMT\u003c/a\u003e\u003c/strong\u003e Machine translation models trained using \u003ca href=\"http://opus.nlpl.eu/\" rel=\"nofollow\"\u003eOPUS\u003c/a\u003e data by Jörg Tiedemann. The \u003ca href=\"https://marian-nmt.github.io/\" rel=\"nofollow\"\u003eMarian Framework\u003c/a\u003e is being developed by the Microsoft Translator Team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/markuplm\" rel=\"nofollow\"\u003eMarkupLM\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research Asia) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.08518\" rel=\"nofollow\"\u003eMarkupLM: Pre-training of Text and Markup Language for Visually-rich Document Understanding\u003c/a\u003e by Junlong Li, Yiheng Xu, Lei Cui, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mask2former\" rel=\"nofollow\"\u003eMask2Former\u003c/a\u003e\u003c/strong\u003e (from FAIR and UIUC) released with the paper \u003ca href=\"https://arxiv.org/abs/2112.01527\" rel=\"nofollow\"\u003eMasked-attention Mask Transformer for Universal Image Segmentation\u003c/a\u003e by Bowen Cheng, Ishan Misra, Alexander G. Schwing, Alexander Kirillov, Rohit Girdhar.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/maskformer\" rel=\"nofollow\"\u003eMaskFormer\u003c/a\u003e\u003c/strong\u003e (from Meta and UIUC) released with the paper \u003ca href=\"https://arxiv.org/abs/2107.06278\" rel=\"nofollow\"\u003ePer-Pixel Classification is Not All You Need for Semantic Segmentation\u003c/a\u003e by Bowen Cheng, Alexander G. 
Schwing, Alexander Kirillov.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/matcha\" rel=\"nofollow\"\u003eMatCha\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2212.09662\" rel=\"nofollow\"\u003eMatCha: Enhancing Visual Language Pretraining with Math Reasoning and Chart Derendering\u003c/a\u003e by Fangyu Liu, Francesco Piccinno, Syrine Krichene, Chenxi Pang, Kenton Lee, Mandar Joshi, Yasemin Altun, Nigel Collier, Julian Martin Eisenschlos.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mbart\" rel=\"nofollow\"\u003emBART\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2001.08210\" rel=\"nofollow\"\u003eMultilingual Denoising Pre-training for Neural Machine Translation\u003c/a\u003e by Yinhan Liu, Jiatao Gu, Naman Goyal, Xian Li, Sergey Edunov, Marjan Ghazvininejad, Mike Lewis, Luke Zettlemoyer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mbart\" rel=\"nofollow\"\u003emBART-50\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2008.00401\" rel=\"nofollow\"\u003eMultilingual Translation with Extensible Multilingual Pretraining and Finetuning\u003c/a\u003e by Yuqing Tang, Chau Tran, Xian Li, Peng-Jen Chen, Naman Goyal, Vishrav Chaudhary, Jiatao Gu, Angela Fan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mega\" rel=\"nofollow\"\u003eMEGA\u003c/a\u003e\u003c/strong\u003e (from Meta/USC/CMU/SJTU) released with the paper \u003ca href=\"https://arxiv.org/abs/2209.10655\" rel=\"nofollow\"\u003eMega: Moving Average Equipped Gated Attention\u003c/a\u003e by Xuezhe Ma, Chunting Zhou, Xiang Kong, Junxian He, Liangke Gui, Graham Neubig, Jonathan May, and Luke Zettlemoyer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/megatron-bert\" rel=\"nofollow\"\u003eMegatron-BERT\u003c/a\u003e\u003c/strong\u003e (from NVIDIA) released with the paper \u003ca href=\"https://arxiv.org/abs/1909.08053\" rel=\"nofollow\"\u003eMegatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism\u003c/a\u003e by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/megatron_gpt2\" rel=\"nofollow\"\u003eMegatron-GPT2\u003c/a\u003e\u003c/strong\u003e (from NVIDIA) released with the paper \u003ca href=\"https://arxiv.org/abs/1909.08053\" rel=\"nofollow\"\u003eMegatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism\u003c/a\u003e by Mohammad Shoeybi, Mostofa Patwary, Raul Puri, Patrick LeGresley, Jared Casper and Bryan Catanzaro.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mgp-str\" rel=\"nofollow\"\u003eMGP-STR\u003c/a\u003e\u003c/strong\u003e (from Alibaba Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2209.03592\" rel=\"nofollow\"\u003eMulti-Granularity Prediction for Scene Text Recognition\u003c/a\u003e by Peng Wang, Cheng Da, and Cong Yao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca 
href=\"https://huggingface.co/docs/transformers/model_doc/mistral\" rel=\"nofollow\"\u003eMistral\u003c/a\u003e\u003c/strong\u003e (from Mistral AI) by The \u003ca href=\"https://mistral.ai\" rel=\"nofollow\"\u003eMistral AI\u003c/a\u003e team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mixtral\" rel=\"nofollow\"\u003eMixtral\u003c/a\u003e\u003c/strong\u003e (from Mistral AI) by The \u003ca href=\"https://mistral.ai\" rel=\"nofollow\"\u003eMistral AI\u003c/a\u003e team: Albert Jiang, Alexandre Sablayrolles, Arthur Mensch, Chris Bamford, Devendra Singh Chaplot, Diego de las Casas, Florian Bressand, Gianna Lengyel, Guillaume Lample, Lélio Renard Lavaud, Lucile Saulnier, Marie-Anne Lachaux, Pierre Stock, Teven Le Scao, Thibaut Lavril, Thomas Wang, Timothée Lacroix, William El Sayed.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mluke\" rel=\"nofollow\"\u003emLUKE\u003c/a\u003e\u003c/strong\u003e (from Studio Ousia) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.08151\" rel=\"nofollow\"\u003emLUKE: The Power of Entity Representations in Multilingual Pretrained Language Models\u003c/a\u003e by Ryokan Ri, Ikuya Yamada, and Yoshimasa Tsuruoka.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mms\" rel=\"nofollow\"\u003eMMS\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2305.13516\" rel=\"nofollow\"\u003eScaling Speech Technology to 1,000+ Languages\u003c/a\u003e by Vineel Pratap, Andros Tjandra, Bowen Shi, Paden Tomasello, Arun Babu, Sayani Kundu, Ali Elkahky, Zhaoheng Ni, Apoorv Vyas, Maryam Fazel-Zarandi, Alexei Baevski, Yossi Adi, Xiaohui Zhang, Wei-Ning Hsu, Alexis Conneau, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mobilebert\" rel=\"nofollow\"\u003eMobileBERT\u003c/a\u003e\u003c/strong\u003e (from CMU/Google Brain) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.02984\" rel=\"nofollow\"\u003eMobileBERT: a Compact Task-Agnostic BERT for Resource-Limited Devices\u003c/a\u003e by Zhiqing Sun, Hongkun Yu, Xiaodan Song, Renjie Liu, Yiming Yang, and Denny Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mobilenet_v1\" rel=\"nofollow\"\u003eMobileNetV1\u003c/a\u003e\u003c/strong\u003e (from Google Inc.) released with the paper \u003ca href=\"https://arxiv.org/abs/1704.04861\" rel=\"nofollow\"\u003eMobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications\u003c/a\u003e by Andrew G. Howard, Menglong Zhu, Bo Chen, Dmitry Kalenichenko, Weijun Wang, Tobias Weyand, Marco Andreetto, Hartwig Adam.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mobilenet_v2\" rel=\"nofollow\"\u003eMobileNetV2\u003c/a\u003e\u003c/strong\u003e (from Google Inc.) 
released with the paper \u003ca href=\"https://arxiv.org/abs/1801.04381\" rel=\"nofollow\"\u003eMobileNetV2: Inverted Residuals and Linear Bottlenecks\u003c/a\u003e by Mark Sandler, Andrew Howard, Menglong Zhu, Andrey Zhmoginov, Liang-Chieh Chen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mobilevit\" rel=\"nofollow\"\u003eMobileViT\u003c/a\u003e\u003c/strong\u003e (from Apple) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.02178\" rel=\"nofollow\"\u003eMobileViT: Light-weight, General-purpose, and Mobile-friendly Vision Transformer\u003c/a\u003e by Sachin Mehta and Mohammad Rastegari.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mobilevitv2\" rel=\"nofollow\"\u003eMobileViTV2\u003c/a\u003e\u003c/strong\u003e (from Apple) released with the paper \u003ca href=\"https://arxiv.org/abs/2206.02680\" rel=\"nofollow\"\u003eSeparable Self-attention for Mobile Vision Transformers\u003c/a\u003e by Sachin Mehta and Mohammad Rastegari.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mpnet\" rel=\"nofollow\"\u003eMPNet\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.09297\" rel=\"nofollow\"\u003eMPNet: Masked and Permuted Pre-training for Language Understanding\u003c/a\u003e by Kaitao Song, Xu Tan, Tao Qin, Jianfeng Lu, Tie-Yan Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mpt\" rel=\"nofollow\"\u003eMPT\u003c/a\u003e\u003c/strong\u003e (from MosaiML) released with the repository \u003ca href=\"https://github.com/mosaicml/llm-foundry/\"\u003ellm-foundry\u003c/a\u003e by the MosaicML NLP Team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mra\" rel=\"nofollow\"\u003eMRA\u003c/a\u003e\u003c/strong\u003e (from the University of Wisconsin - Madison) released with the paper \u003ca href=\"https://arxiv.org/abs/2207.10284\" rel=\"nofollow\"\u003eMulti Resolution Analysis (MRA) for Approximate Self-Attention\u003c/a\u003e by Zhanpeng Zeng, Sourav Pal, Jeffery Kline, Glenn M Fung, Vikas Singh.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mt5\" rel=\"nofollow\"\u003eMT5\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.11934\" rel=\"nofollow\"\u003emT5: A massively multilingual pre-trained text-to-text transformer\u003c/a\u003e by Linting Xue, Noah Constant, Adam Roberts, Mihir Kale, Rami Al-Rfou, Aditya Siddhant, Aditya Barua, Colin Raffel.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/musicgen\" rel=\"nofollow\"\u003eMusicGen\u003c/a\u003e\u003c/strong\u003e (from Meta) released with the paper \u003ca href=\"https://arxiv.org/abs/2306.05284\" rel=\"nofollow\"\u003eSimple and Controllable Music Generation\u003c/a\u003e by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/musicgen_melody\" rel=\"nofollow\"\u003eMusicGen Melody\u003c/a\u003e\u003c/strong\u003e (from Meta) released with the paper \u003ca 
href=\"https://arxiv.org/abs/2306.05284\" rel=\"nofollow\"\u003eSimple and Controllable Music Generation\u003c/a\u003e by Jade Copet, Felix Kreuk, Itai Gat, Tal Remez, David Kant, Gabriel Synnaeve, Yossi Adi and Alexandre Défossez.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/mvp\" rel=\"nofollow\"\u003eMVP\u003c/a\u003e\u003c/strong\u003e (from RUC AI Box) released with the paper \u003ca href=\"https://arxiv.org/abs/2206.12131\" rel=\"nofollow\"\u003eMVP: Multi-task Supervised Pre-training for Natural Language Generation\u003c/a\u003e by Tianyi Tang, Junyi Li, Wayne Xin Zhao and Ji-Rong Wen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nat\" rel=\"nofollow\"\u003eNAT\u003c/a\u003e\u003c/strong\u003e (from SHI Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2204.07143\" rel=\"nofollow\"\u003eNeighborhood Attention Transformer\u003c/a\u003e by Ali Hassani, Steven Walton, Jiachen Li, Shen Li, and Humphrey Shi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nezha\" rel=\"nofollow\"\u003eNezha\u003c/a\u003e\u003c/strong\u003e (from Huawei Noah’s Ark Lab) released with the paper \u003ca href=\"https://arxiv.org/abs/1909.00204\" rel=\"nofollow\"\u003eNEZHA: Neural Contextualized Representation for Chinese Language Understanding\u003c/a\u003e by Junqiu Wei, Xiaozhe Ren, Xiaoguang Li, Wenyong Huang, Yi Liao, Yasheng Wang, Jiashu Lin, Xin Jiang, Xiao Chen and Qun Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nllb\" rel=\"nofollow\"\u003eNLLB\u003c/a\u003e\u003c/strong\u003e (from Meta) released with the paper \u003ca href=\"https://arxiv.org/abs/2207.04672\" rel=\"nofollow\"\u003eNo Language Left Behind: Scaling Human-Centered Machine Translation\u003c/a\u003e by the NLLB team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nllb-moe\" rel=\"nofollow\"\u003eNLLB-MOE\u003c/a\u003e\u003c/strong\u003e (from Meta) released with the paper \u003ca href=\"https://arxiv.org/abs/2207.04672\" rel=\"nofollow\"\u003eNo Language Left Behind: Scaling Human-Centered Machine Translation\u003c/a\u003e by the NLLB team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nougat\" rel=\"nofollow\"\u003eNougat\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2308.13418\" rel=\"nofollow\"\u003eNougat: Neural Optical Understanding for Academic Documents\u003c/a\u003e by Lukas Blecher, Guillem Cucurull, Thomas Scialom, Robert Stojnic.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/nystromformer\" rel=\"nofollow\"\u003eNyströmformer\u003c/a\u003e\u003c/strong\u003e (from the University of Wisconsin - Madison) released with the paper \u003ca href=\"https://arxiv.org/abs/2102.03902\" rel=\"nofollow\"\u003eNyströmformer: A Nyström-Based Algorithm for Approximating Self-Attention\u003c/a\u003e by Yunyang Xiong, Zhanpeng Zeng, Rudrasis Chakraborty, Mingxing Tan, Glenn Fung, Yin Li, Vikas Singh.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/oneformer\" rel=\"nofollow\"\u003eOneFormer\u003c/a\u003e\u003c/strong\u003e 
(from SHI Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2211.06220\" rel=\"nofollow\"\u003eOneFormer: One Transformer to Rule Universal Image Segmentation\u003c/a\u003e by Jitesh Jain, Jiachen Li, MangTik Chiu, Ali Hassani, Nikita Orlov, Humphrey Shi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/open-llama\" rel=\"nofollow\"\u003eOpenLlama\u003c/a\u003e\u003c/strong\u003e (from \u003ca href=\"https://huggingface.co/s-JoL\" rel=\"nofollow\"\u003es-JoL\u003c/a\u003e) released on GitHub (now removed).\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/master/model_doc/opt\" rel=\"nofollow\"\u003eOPT\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2205.01068\" rel=\"nofollow\"\u003eOPT: Open Pre-trained Transformer Language Models\u003c/a\u003e by Susan Zhang, Stephen Roller, Naman Goyal, Mikel Artetxe, Moya Chen, Shuohui Chen et al.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/owlvit\" rel=\"nofollow\"\u003eOWL-ViT\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2205.06230\" rel=\"nofollow\"\u003eSimple Open-Vocabulary Object Detection with Vision Transformers\u003c/a\u003e by Matthias Minderer, Alexey Gritsenko, Austin Stone, Maxim Neumann, Dirk Weissenborn, Alexey Dosovitskiy, Aravindh Mahendran, Anurag Arnab, Mostafa Dehghani, Zhuoran Shen, Xiao Wang, Xiaohua Zhai, Thomas Kipf, and Neil Houlsby.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/owlv2\" rel=\"nofollow\"\u003eOWLv2\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2306.09683\" rel=\"nofollow\"\u003eScaling Open-Vocabulary Object Detection\u003c/a\u003e by Matthias Minderer, Alexey Gritsenko, Neil Houlsby.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/patchtsmixer\" rel=\"nofollow\"\u003ePatchTSMixer\u003c/a\u003e\u003c/strong\u003e (from IBM Research) released with the paper \u003ca href=\"https://arxiv.org/pdf/2306.09364.pdf\" rel=\"nofollow\"\u003eTSMixer: Lightweight MLP-Mixer Model for Multivariate Time Series Forecasting\u003c/a\u003e by Vijay Ekambaram, Arindam Jati, Nam Nguyen, Phanwadee Sinthong, Jayant Kalagnanam.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/patchtst\" rel=\"nofollow\"\u003ePatchTST\u003c/a\u003e\u003c/strong\u003e (from IBM) released with the paper \u003ca href=\"https://arxiv.org/abs/2211.14730\" rel=\"nofollow\"\u003eA Time Series is Worth 64 Words: Long-term Forecasting with Transformers\u003c/a\u003e by Yuqi Nie, Nam H. Nguyen, Phanwadee Sinthong, Jayant Kalagnanam.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pegasus\" rel=\"nofollow\"\u003ePegasus\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/1912.08777\" rel=\"nofollow\"\u003ePEGASUS: Pre-training with Extracted Gap-sentences for Abstractive Summarization\u003c/a\u003e by Jingqing Zhang, Yao Zhao, Mohammad Saleh and Peter J. 
Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pegasus_x\" rel=\"nofollow\"\u003ePEGASUS-X\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/2208.04347\" rel=\"nofollow\"\u003eInvestigating Efficiently Extending Transformers for Long Input Summarization\u003c/a\u003e by Jason Phang, Yao Zhao, and Peter J. Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/perceiver\" rel=\"nofollow\"\u003ePerceiver IO\u003c/a\u003e\u003c/strong\u003e (from Deepmind) released with the paper \u003ca href=\"https://arxiv.org/abs/2107.14795\" rel=\"nofollow\"\u003ePerceiver IO: A General Architecture for Structured Inputs \u0026amp; Outputs\u003c/a\u003e by Andrew Jaegle, Sebastian Borgeaud, Jean-Baptiste Alayrac, Carl Doersch, Catalin Ionescu, David Ding, Skanda Koppula, Daniel Zoran, Andrew Brock, Evan Shelhamer, Olivier Hénaff, Matthew M. Botvinick, Andrew Zisserman, Oriol Vinyals, João Carreira.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/persimmon\" rel=\"nofollow\"\u003ePersimmon\u003c/a\u003e\u003c/strong\u003e (from ADEPT) released in a \u003ca href=\"https://www.adept.ai/blog/persimmon-8b\" rel=\"nofollow\"\u003eblog post\u003c/a\u003e by Erich Elsen, Augustus Odena, Maxwell Nye, Sağnak Taşırlar, Tri Dao, Curtis Hawthorne, Deepak Moparthi, Arushi Somani.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/phi\" rel=\"nofollow\"\u003ePhi\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the papers - \u003ca href=\"https://arxiv.org/abs/2306.11644\" rel=\"nofollow\"\u003eTextbooks Are All You Need\u003c/a\u003e by Suriya Gunasekar, Yi Zhang, Jyoti Aneja, Caio César Teodoro Mendes, Allie Del Giorno, Sivakanth Gopi, Mojan Javaheripi, Piero Kauffmann, Gustavo de Rosa, Olli Saarikivi, Adil Salim, Shital Shah, Harkirat Singh Behl, Xin Wang, Sébastien Bubeck, Ronen Eldan, Adam Tauman Kalai, Yin Tat Lee and Yuanzhi Li, \u003ca href=\"https://arxiv.org/abs/2309.05463\" rel=\"nofollow\"\u003eTextbooks Are All You Need II: phi-1.5 technical report\u003c/a\u003e by Yuanzhi Li, Sébastien Bubeck, Ronen Eldan, Allie Del Giorno, Suriya Gunasekar and Yin Tat Lee.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/phobert\" rel=\"nofollow\"\u003ePhoBERT\u003c/a\u003e\u003c/strong\u003e (from VinAI Research) released with the paper \u003ca href=\"https://www.aclweb.org/anthology/2020.findings-emnlp.92/\" rel=\"nofollow\"\u003ePhoBERT: Pre-trained language models for Vietnamese\u003c/a\u003e by Dat Quoc Nguyen and Anh Tuan Nguyen.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pix2struct\" rel=\"nofollow\"\u003ePix2Struct\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/2210.03347\" rel=\"nofollow\"\u003ePix2Struct: Screenshot Parsing as Pretraining for Visual Language Understanding\u003c/a\u003e by Kenton Lee, Mandar Joshi, Iulia Turc, Hexiang Hu, Fangyu Liu, Julian Eisenschlos, Urvashi Khandelwal, Peter Shaw, Ming-Wei Chang, Kristina Toutanova.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/plbart\" 
rel=\"nofollow\"\u003ePLBart\u003c/a\u003e\u003c/strong\u003e (from UCLA NLP) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.06333\" rel=\"nofollow\"\u003eUnified Pre-training for Program Understanding and Generation\u003c/a\u003e by Wasi Uddin Ahmad, Saikat Chakraborty, Baishakhi Ray, Kai-Wei Chang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/poolformer\" rel=\"nofollow\"\u003ePoolFormer\u003c/a\u003e\u003c/strong\u003e (from Sea AI Labs) released with the paper \u003ca href=\"https://arxiv.org/abs/2111.11418\" rel=\"nofollow\"\u003eMetaFormer is Actually What You Need for Vision\u003c/a\u003e by Yu, Weihao and Luo, Mi and Zhou, Pan and Si, Chenyang and Zhou, Yichen and Wang, Xinchao and Feng, Jiashi and Yan, Shuicheng.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pop2piano\" rel=\"nofollow\"\u003ePop2Piano\u003c/a\u003e\u003c/strong\u003e released with the paper \u003ca href=\"https://arxiv.org/abs/2211.00895\" rel=\"nofollow\"\u003ePop2Piano : Pop Audio-based Piano Cover Generation\u003c/a\u003e by Jongho Choi and Kyogu Lee.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/prophetnet\" rel=\"nofollow\"\u003eProphetNet\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2001.04063\" rel=\"nofollow\"\u003eProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training\u003c/a\u003e by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pvt\" rel=\"nofollow\"\u003ePVT\u003c/a\u003e\u003c/strong\u003e (from Nanjing University, The University of Hong Kong etc.) released with the paper \u003ca href=\"https://arxiv.org/pdf/2102.12122.pdf\" rel=\"nofollow\"\u003ePyramid Vision Transformer: A Versatile Backbone for Dense Prediction without Convolutions\u003c/a\u003e by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/pvt_v2\" rel=\"nofollow\"\u003ePVTv2\u003c/a\u003e\u003c/strong\u003e (from Shanghai AI Laboratory, Nanjing University, The University of Hong Kong etc.) 
released with the paper \u003ca href=\"https://arxiv.org/abs/2106.13797\" rel=\"nofollow\"\u003ePVT v2: Improved Baselines with Pyramid Vision Transformer\u003c/a\u003e by Wenhai Wang, Enze Xie, Xiang Li, Deng-Ping Fan, Kaitao Song, Ding Liang, Tong Lu, Ping Luo, Ling Shao.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/qdqbert\" rel=\"nofollow\"\u003eQDQBert\u003c/a\u003e\u003c/strong\u003e (from NVIDIA) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.09602\" rel=\"nofollow\"\u003eInteger Quantization for Deep Learning Inference: Principles and Empirical Evaluation\u003c/a\u003e by Hao Wu, Patrick Judd, Xiaojie Zhang, Mikhail Isaev and Paulius Micikevicius.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/qwen2\" rel=\"nofollow\"\u003eQwen2\u003c/a\u003e\u003c/strong\u003e (from the Qwen team, Alibaba Group) released with the paper \u003ca href=\"https://arxiv.org/abs/2309.16609\" rel=\"nofollow\"\u003eQwen Technical Report\u003c/a\u003e by Jinze Bai, Shuai Bai, Yunfei Chu, Zeyu Cui, Kai Dang, Xiaodong Deng, Yang Fan, Wenbin Ge, Yu Han, Fei Huang, Binyuan Hui, Luo Ji, Mei Li, Junyang Lin, Runji Lin, Dayiheng Liu, Gao Liu, Chengqiang Lu, Keming Lu, Jianxin Ma, Rui Men, Xingzhang Ren, Xuancheng Ren, Chuanqi Tan, Sinan Tan, Jianhong Tu, Peng Wang, Shijie Wang, Wei Wang, Shengguang Wu, Benfeng Xu, Jin Xu, An Yang, Hao Yang, Jian Yang, Shusheng Yang, Yang Yao, Bowen Yu, Hongyi Yuan, Zheng Yuan, Jianwei Zhang, Xingxuan Zhang, Yichang Zhang, Zhenru Zhang, Chang Zhou, Jingren Zhou, Xiaohuan Zhou and Tianhang Zhu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/main/model_doc/qwen2_moe\" rel=\"nofollow\"\u003eQwen2MoE\u003c/a\u003e\u003c/strong\u003e (from the Qwen team, Alibaba Group) released with \u003ca href=\"https://qwenlm.github.io/blog/qwen1.5/\" rel=\"nofollow\"\u003eblog post\u003c/a\u003e by Bo Zheng, Dayiheng Liu, Rui Men, Junyang Lin, Zhou San, Bowen Yu, An Yang, Mingfeng Xue, Fei Huang, Binyuan Hui, Mei Li, Tianyu Liu, Xingzhang Ren, Xuancheng Ren, Kexin Yang, Chang Zhou, Jingren Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/rag\" rel=\"nofollow\"\u003eRAG\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2005.11401\" rel=\"nofollow\"\u003eRetrieval-Augmented Generation for Knowledge-Intensive NLP Tasks\u003c/a\u003e by Patrick Lewis, Ethan Perez, Aleksandara Piktus, Fabio Petroni, Vladimir Karpukhin, Naman Goyal, Heinrich Küttler, Mike Lewis, Wen-tau Yih, Tim Rocktäschel, Sebastian Riedel, Douwe Kiela.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/realm.html\" rel=\"nofollow\"\u003eREALM\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2002.08909\" rel=\"nofollow\"\u003eREALM: Retrieval-Augmented Language Model Pre-Training\u003c/a\u003e by Kelvin Guu, Kenton Lee, Zora Tung, Panupong Pasupat and Ming-Wei Chang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/reformer\" rel=\"nofollow\"\u003eReformer\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2001.04451\" 
rel=\"nofollow\"\u003eReformer: The Efficient Transformer\u003c/a\u003e by Nikita Kitaev, Łukasz Kaiser, Anselm Levskaya.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/regnet\" rel=\"nofollow\"\u003eRegNet\u003c/a\u003e\u003c/strong\u003e (from META Platforms) released with the paper \u003ca href=\"https://arxiv.org/abs/2003.13678\" rel=\"nofollow\"\u003eDesigning Network Design Space\u003c/a\u003e by Ilija Radosavovic, Raj Prateek Kosaraju, Ross Girshick, Kaiming He, Piotr Dollár.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/rembert\" rel=\"nofollow\"\u003eRemBERT\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2010.12821\" rel=\"nofollow\"\u003eRethinking embedding coupling in pre-trained language models\u003c/a\u003e by Hyung Won Chung, Thibault Févry, Henry Tsai, M. Johnson, Sebastian Ruder.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/resnet\" rel=\"nofollow\"\u003eResNet\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/1512.03385\" rel=\"nofollow\"\u003eDeep Residual Learning for Image Recognition\u003c/a\u003e by Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/roberta\" rel=\"nofollow\"\u003eRoBERTa\u003c/a\u003e\u003c/strong\u003e (from Facebook), released together with the paper \u003ca href=\"https://arxiv.org/abs/1907.11692\" rel=\"nofollow\"\u003eRoBERTa: A Robustly Optimized BERT Pretraining Approach\u003c/a\u003e by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, Veselin Stoyanov.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/roberta-prelayernorm\" rel=\"nofollow\"\u003eRoBERTa-PreLayerNorm\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/1904.01038\" rel=\"nofollow\"\u003efairseq: A Fast, Extensible Toolkit for Sequence Modeling\u003c/a\u003e by Myle Ott, Sergey Edunov, Alexei Baevski, Angela Fan, Sam Gross, Nathan Ng, David Grangier, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/roc_bert\" rel=\"nofollow\"\u003eRoCBert\u003c/a\u003e\u003c/strong\u003e (from WeChatAI) released with the paper \u003ca href=\"https://aclanthology.org/2022.acl-long.65.pdf\" rel=\"nofollow\"\u003eRoCBert: Robust Chinese Bert with Multimodal Contrastive Pretraining\u003c/a\u003e by HuiSu, WeiweiShi, XiaoyuShen, XiaoZhou, TuoJi, JiaruiFang, JieZhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/roformer\" rel=\"nofollow\"\u003eRoFormer\u003c/a\u003e\u003c/strong\u003e (from ZhuiyiTechnology), released together with the paper \u003ca href=\"https://arxiv.org/abs/2104.09864\" rel=\"nofollow\"\u003eRoFormer: Enhanced Transformer with Rotary Position Embedding\u003c/a\u003e by Jianlin Su and Yu Lu and Shengfeng Pan and Bo Wen and Yunfeng Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/rwkv\" 
rel=\"nofollow\"\u003eRWKV\u003c/a\u003e\u003c/strong\u003e (from Bo Peng), released on \u003ca href=\"https://github.com/BlinkDL/RWKV-LM\"\u003ethis repo\u003c/a\u003e by Bo Peng.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/seamless_m4t\" rel=\"nofollow\"\u003eSeamlessM4T\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://dl.fbaipublicfiles.com/seamless/seamless_m4t_paper.pdf\" rel=\"nofollow\"\u003eSeamlessM4T — Massively Multilingual \u0026amp; Multimodal Machine Translation\u003c/a\u003e by the Seamless Communication team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/seamless_m4t_v2\" rel=\"nofollow\"\u003eSeamlessM4Tv2\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/\" rel=\"nofollow\"\u003eSeamless: Multilingual Expressive and Streaming Speech Translation\u003c/a\u003e by the Seamless Communication team.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/segformer\" rel=\"nofollow\"\u003eSegFormer\u003c/a\u003e\u003c/strong\u003e (from NVIDIA) released with the paper \u003ca href=\"https://arxiv.org/abs/2105.15203\" rel=\"nofollow\"\u003eSegFormer: Simple and Efficient Design for Semantic Segmentation with Transformers\u003c/a\u003e by Enze Xie, Wenhai Wang, Zhiding Yu, Anima Anandkumar, Jose M. Alvarez, Ping Luo.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/seggpt\" rel=\"nofollow\"\u003eSegGPT\u003c/a\u003e\u003c/strong\u003e (from Beijing Academy of Artificial Intelligence (BAAI)) released with the paper \u003ca href=\"https://arxiv.org/abs/2304.03284\" rel=\"nofollow\"\u003eSegGPT: Segmenting Everything In Context\u003c/a\u003e by Xinlong Wang, Xiaosong Zhang, Yue Cao, Wen Wang, Chunhua Shen, Tiejun Huang.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/sam\" rel=\"nofollow\"\u003eSegment Anything\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/pdf/2304.02643v1.pdf\" rel=\"nofollow\"\u003eSegment Anything\u003c/a\u003e by Alexander Kirillov, Eric Mintun, Nikhila Ravi, Hanzi Mao, Chloe Rolland, Laura Gustafson, Tete Xiao, Spencer Whitehead, Alex Berg, Wan-Yen Lo, Piotr Dollar, Ross Girshick.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/sew\" rel=\"nofollow\"\u003eSEW\u003c/a\u003e\u003c/strong\u003e (from ASAPP) released with the paper \u003ca href=\"https://arxiv.org/abs/2109.06870\" rel=\"nofollow\"\u003ePerformance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition\u003c/a\u003e by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. Weinberger, Yoav Artzi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/sew_d\" rel=\"nofollow\"\u003eSEW-D\u003c/a\u003e\u003c/strong\u003e (from ASAPP) released with the paper \u003ca href=\"https://arxiv.org/abs/2109.06870\" rel=\"nofollow\"\u003ePerformance-Efficiency Trade-offs in Unsupervised Pre-training for Speech Recognition\u003c/a\u003e by Felix Wu, Kwangyoun Kim, Jing Pan, Kyu Han, Kilian Q. 
Weinberger, Yoav Artzi.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/siglip\" rel=\"nofollow\"\u003eSigLIP\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2303.15343\" rel=\"nofollow\"\u003eSigmoid Loss for Language Image Pre-Training\u003c/a\u003e by Xiaohua Zhai, Basil Mustafa, Alexander Kolesnikov, Lucas Beyer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/speecht5\" rel=\"nofollow\"\u003eSpeechT5\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.07205\" rel=\"nofollow\"\u003eSpeechT5: Unified-Modal Encoder-Decoder Pre-Training for Spoken Language Processing\u003c/a\u003e by Junyi Ao, Rui Wang, Long Zhou, Chengyi Wang, Shuo Ren, Yu Wu, Shujie Liu, Tom Ko, Qing Li, Yu Zhang, Zhihua Wei, Yao Qian, Jinyu Li, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/speech_to_text\" rel=\"nofollow\"\u003eSpeechToTextTransformer\u003c/a\u003e\u003c/strong\u003e (from Facebook), released together with the paper \u003ca href=\"https://arxiv.org/abs/2010.05171\" rel=\"nofollow\"\u003efairseq S2T: Fast Speech-to-Text Modeling with fairseq\u003c/a\u003e by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Dmytro Okhonko, Juan Pino.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/speech_to_text_2\" rel=\"nofollow\"\u003eSpeechToTextTransformer2\u003c/a\u003e\u003c/strong\u003e (from Facebook), released together with the paper \u003ca href=\"https://arxiv.org/abs/2104.06678\" rel=\"nofollow\"\u003eLarge-Scale Self- and Semi-Supervised Learning for Speech Translation\u003c/a\u003e by Changhan Wang, Anne Wu, Juan Pino, Alexei Baevski, Michael Auli, Alexis Conneau.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/splinter\" rel=\"nofollow\"\u003eSplinter\u003c/a\u003e\u003c/strong\u003e (from Tel Aviv University), released together with the paper \u003ca href=\"https://arxiv.org/abs/2101.00438\" rel=\"nofollow\"\u003eFew-Shot Question Answering by Pretraining Span Selection\u003c/a\u003e by Ori Ram, Yuval Kirstain, Jonathan Berant, Amir Globerson, Omer Levy.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/squeezebert\" rel=\"nofollow\"\u003eSqueezeBERT\u003c/a\u003e\u003c/strong\u003e (from Berkeley) released with the paper \u003ca href=\"https://arxiv.org/abs/2006.11316\" rel=\"nofollow\"\u003eSqueezeBERT: What can computer vision teach NLP about efficient neural networks?\u003c/a\u003e by Forrest N. Iandola, Albert E. Shaw, Ravi Krishna, and Kurt W. 
Keutzer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/stablelm\" rel=\"nofollow\"\u003eStableLm\u003c/a\u003e\u003c/strong\u003e (from Stability AI) released with the paper \u003ca href=\"https://stability.wandb.io/stability-llm/stable-lm/reports/StableLM-3B-4E1T--VmlldzoyMjU4?accessToken=u3zujipenkx5g7rtcj9qojjgxpconyjktjkli2po09nffrffdhhchq045vp0wyfo\" rel=\"nofollow\"\u003eStableLM 3B 4E1T (Technical Report)\u003c/a\u003e by Jonathan Tow, Marco Bellagente, Dakota Mahan, Carlos Riquelme Ruiz, Duy Phung, Maksym Zhuravinskyi, Nathan Cooper, Nikhil Pinnaparaju, Reshinth Adithyan, and James Baicoianu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/starcoder2\" rel=\"nofollow\"\u003eStarcoder2\u003c/a\u003e\u003c/strong\u003e (from BigCode team) released with the paper \u003ca href=\"https://arxiv.org/abs/2402.19173\" rel=\"nofollow\"\u003eStarCoder 2 and The Stack v2: The Next Generation\u003c/a\u003e by Anton Lozhkov, Raymond Li, Loubna Ben Allal, Federico Cassano, Joel Lamy-Poirier, Nouamane Tazi, Ao Tang, Dmytro Pykhtar, Jiawei Liu, Yuxiang Wei, Tianyang Liu, Max Tian, Denis Kocetkov, Arthur Zucker, Younes Belkada, Zijian Wang, Qian Liu, Dmitry Abulkhanov, Indraneil Paul, Zhuang Li, Wen-Ding Li, Megan Risdal, Jia Li, Jian Zhu, Terry Yue Zhuo, Evgenii Zheltonozhskii, Nii Osae Osae Dade, Wenhao Yu, Lucas Krauß, Naman Jain, Yixuan Su, Xuanli He, Manan Dey, Edoardo Abati, Yekun Chai, Niklas Muennighoff, Xiangru Tang, Muhtasham Oblokulov, Christopher Akiki, Marc Marone, Chenghao Mou, Mayank Mishra, Alex Gu, Binyuan Hui, Tri Dao, Armel Zebaze, Olivier Dehaene, Nicolas Patry, Canwen Xu, Julian McAuley, Han Hu, Torsten Scholak, Sebastien Paquet, Jennifer Robinson, Carolyn Jane Anderson, Nicolas Chapados, Mostofa Patwary, Nima Tajbakhsh, Yacine Jernite, Carlos Muñoz Ferrandis, Lingming Zhang, Sean Hughes, Thomas Wolf, Arjun Guha, Leandro von Werra, and Harm de Vries.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/superpoint\" rel=\"nofollow\"\u003eSuperPoint\u003c/a\u003e\u003c/strong\u003e (from MagicLeap) released with the paper \u003ca href=\"https://arxiv.org/abs/1712.07629\" rel=\"nofollow\"\u003eSuperPoint: Self-Supervised Interest Point Detection and Description\u003c/a\u003e by Daniel DeTone, Tomasz Malisiewicz and Andrew Rabinovich.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/swiftformer\" rel=\"nofollow\"\u003eSwiftFormer\u003c/a\u003e\u003c/strong\u003e (from MBZUAI) released with the paper \u003ca href=\"https://arxiv.org/abs/2303.15446\" rel=\"nofollow\"\u003eSwiftFormer: Efficient Additive Attention for Transformer-based Real-time Mobile Vision Applications\u003c/a\u003e by Abdelrahman Shaker, Muhammad Maaz, Hanoona Rasheed, Salman Khan, Ming-Hsuan Yang, Fahad Shahbaz Khan.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/swin\" rel=\"nofollow\"\u003eSwin Transformer\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2103.14030\" rel=\"nofollow\"\u003eSwin Transformer: Hierarchical Vision Transformer using Shifted Windows\u003c/a\u003e by Ze Liu, Yutong Lin, Yue Cao, Han Hu, Yixuan Wei, Zheng Zhang, Stephen Lin, Baining Guo.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca 
href=\"https://huggingface.co/docs/transformers/model_doc/swinv2\" rel=\"nofollow\"\u003eSwin Transformer V2\u003c/a\u003e\u003c/strong\u003e (from Microsoft) released with the paper \u003ca href=\"https://arxiv.org/abs/2111.09883\" rel=\"nofollow\"\u003eSwin Transformer V2: Scaling Up Capacity and Resolution\u003c/a\u003e by Ze Liu, Han Hu, Yutong Lin, Zhuliang Yao, Zhenda Xie, Yixuan Wei, Jia Ning, Yue Cao, Zheng Zhang, Li Dong, Furu Wei, Baining Guo.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/swin2sr\" rel=\"nofollow\"\u003eSwin2SR\u003c/a\u003e\u003c/strong\u003e (from University of Würzburg) released with the paper \u003ca href=\"https://arxiv.org/abs/2209.11345\" rel=\"nofollow\"\u003eSwin2SR: SwinV2 Transformer for Compressed Image Super-Resolution and Restoration\u003c/a\u003e by Marcos V. Conde, Ui-Jin Choi, Maxime Burchi, Radu Timofte.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/switch_transformers\" rel=\"nofollow\"\u003eSwitchTransformers\u003c/a\u003e\u003c/strong\u003e (from Google) released with the paper \u003ca href=\"https://arxiv.org/abs/2101.03961\" rel=\"nofollow\"\u003eSwitch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity\u003c/a\u003e by William Fedus, Barret Zoph, Noam Shazeer.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/t5\" rel=\"nofollow\"\u003eT5\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/1910.10683\" rel=\"nofollow\"\u003eExploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer\u003c/a\u003e by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/t5v1.1\" rel=\"nofollow\"\u003eT5v1.1\u003c/a\u003e\u003c/strong\u003e (from Google AI) released in the repository \u003ca href=\"https://github.com/google-research/text-to-text-transfer-transformer/blob/main/released_checkpoints.md#t511\"\u003egoogle-research/text-to-text-transfer-transformer\u003c/a\u003e by Colin Raffel and Noam Shazeer and Adam Roberts and Katherine Lee and Sharan Narang and Michael Matena and Yanqi Zhou and Wei Li and Peter J. 
Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/table-transformer\" rel=\"nofollow\"\u003eTable Transformer\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.00061\" rel=\"nofollow\"\u003ePubTables-1M: Towards Comprehensive Table Extraction From Unstructured Documents\u003c/a\u003e by Brandon Smock, Rohith Pesala, Robin Abraham.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/tapas\" rel=\"nofollow\"\u003eTAPAS\u003c/a\u003e\u003c/strong\u003e (from Google AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2004.02349\" rel=\"nofollow\"\u003eTAPAS: Weakly Supervised Table Parsing via Pre-training\u003c/a\u003e by Jonathan Herzig, Paweł Krzysztof Nowak, Thomas Müller, Francesco Piccinno and Julian Martin Eisenschlos.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/tapex\" rel=\"nofollow\"\u003eTAPEX\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2107.07653\" rel=\"nofollow\"\u003eTAPEX: Table Pre-training via Learning a Neural SQL Executor\u003c/a\u003e by Qian Liu, Bei Chen, Jiaqi Guo, Morteza Ziyadi, Zeqi Lin, Weizhu Chen, Jian-Guang Lou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/time_series_transformer\" rel=\"nofollow\"\u003eTime Series Transformer\u003c/a\u003e\u003c/strong\u003e (from HuggingFace).\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/timesformer\" rel=\"nofollow\"\u003eTimeSformer\u003c/a\u003e\u003c/strong\u003e (from Facebook) released with the paper \u003ca href=\"https://arxiv.org/abs/2102.05095\" rel=\"nofollow\"\u003eIs Space-Time Attention All You Need for Video Understanding?\u003c/a\u003e by Gedas Bertasius, Heng Wang, Lorenzo Torresani.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/trajectory_transformers\" rel=\"nofollow\"\u003eTrajectory Transformer\u003c/a\u003e\u003c/strong\u003e (from the University of California at Berkeley) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.02039\" rel=\"nofollow\"\u003eOffline Reinforcement Learning as One Big Sequence Modeling Problem\u003c/a\u003e by Michael Janner, Qiyang Li, Sergey Levine\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/transfo-xl\" rel=\"nofollow\"\u003eTransformer-XL\u003c/a\u003e\u003c/strong\u003e (from Google/CMU) released with the paper \u003ca href=\"https://arxiv.org/abs/1901.02860\" rel=\"nofollow\"\u003eTransformer-XL: Attentive Language Models Beyond a Fixed-Length Context\u003c/a\u003e by Zihang Dai*, Zhilin Yang*, Yiming Yang, Jaime Carbonell, Quoc V. 
Le, Ruslan Salakhutdinov.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/trocr\" rel=\"nofollow\"\u003eTrOCR\u003c/a\u003e\u003c/strong\u003e (from Microsoft), released together with the paper \u003ca href=\"https://arxiv.org/abs/2109.10282\" rel=\"nofollow\"\u003eTrOCR: Transformer-based Optical Character Recognition with Pre-trained Models\u003c/a\u003e by Minghao Li, Tengchao Lv, Lei Cui, Yijuan Lu, Dinei Florencio, Cha Zhang, Zhoujun Li, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/tvlt\" rel=\"nofollow\"\u003eTVLT\u003c/a\u003e\u003c/strong\u003e (from UNC Chapel Hill) released with the paper \u003ca href=\"https://arxiv.org/abs/2209.14156\" rel=\"nofollow\"\u003eTVLT: Textless Vision-Language Transformer\u003c/a\u003e by Zineng Tang, Jaemin Cho, Yixin Nie, Mohit Bansal.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/tvp\" rel=\"nofollow\"\u003eTVP\u003c/a\u003e\u003c/strong\u003e (from Intel) released with the paper \u003ca href=\"https://arxiv.org/abs/2303.04995\" rel=\"nofollow\"\u003eText-Visual Prompting for Efficient 2D Temporal Video Grounding\u003c/a\u003e by Yimeng Zhang, Xin Chen, Jinghan Jia, Sijia Liu, Ke Ding.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/udop\" rel=\"nofollow\"\u003eUDOP\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2212.02623\" rel=\"nofollow\"\u003eUnifying Vision, Text, and Layout for Universal Document Processing\u003c/a\u003e by Zineng Tang, Ziyi Yang, Guoxin Wang, Yuwei Fang, Yang Liu, Chenguang Zhu, Michael Zeng, Cha Zhang, Mohit Bansal.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/ul2\" rel=\"nofollow\"\u003eUL2\u003c/a\u003e\u003c/strong\u003e (from Google Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2205.05131v1\" rel=\"nofollow\"\u003eUnifying Language Learning Paradigms\u003c/a\u003e by Yi Tay, Mostafa Dehghani, Vinh Q. 
1. **[UMT5](https://huggingface.co/docs/transformers/model_doc/umt5)** (from Google Research) released with the paper [UniMax: Fairer and More Effective Language Sampling for Large-Scale Multilingual Pretraining](https://openreview.net/forum?id=kXwdL1cWOAi) by Hyung Won Chung, Xavier Garcia, Adam Roberts, Yi Tay, Orhan Firat, Sharan Narang, Noah Constant.
1. **[UniSpeech](https://huggingface.co/docs/transformers/model_doc/unispeech)** (from Microsoft Research) released with the paper [UniSpeech: Unified Speech Representation Learning with Labeled and Unlabeled Data](https://arxiv.org/abs/2101.07597) by Chengyi Wang, Yu Wu, Yao Qian, Kenichi Kumatani, Shujie Liu, Furu Wei, Michael Zeng, Xuedong Huang.
1. **[UniSpeechSat](https://huggingface.co/docs/transformers/model_doc/unispeech-sat)** (from Microsoft Research) released with the paper [UNISPEECH-SAT: UNIVERSAL SPEECH REPRESENTATION LEARNING WITH SPEAKER AWARE PRE-TRAINING](https://arxiv.org/abs/2110.05752) by Sanyuan Chen, Yu Wu, Chengyi Wang, Zhengyang Chen, Zhuo Chen, Shujie Liu, Jian Wu, Yao Qian, Furu Wei, Jinyu Li, Xiangzhan Yu.
1. **[UnivNet](https://huggingface.co/docs/transformers/model_doc/univnet)** (from Kakao Corporation) released with the paper [UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation](https://arxiv.org/abs/2106.07889) by Won Jang, Dan Lim, Jaesam Yoon, Bongwan Kim, and Juntae Kim.
1. **[UPerNet](https://huggingface.co/docs/transformers/model_doc/upernet)** (from Peking University) released with the paper [Unified Perceptual Parsing for Scene Understanding](https://arxiv.org/abs/1807.10221) by Tete Xiao, Yingcheng Liu, Bolei Zhou, Yuning Jiang, Jian Sun.
1. **[VAN](https://huggingface.co/docs/transformers/model_doc/van)** (from Tsinghua University and Nankai University) released with the paper [Visual Attention Network](https://arxiv.org/abs/2202.09741) by Meng-Hao Guo, Cheng-Ze Lu, Zheng-Ning Liu, Ming-Ming Cheng, Shi-Min Hu.
1. **[VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae)** (from Multimedia Computing Group, Nanjing University) released with the paper [VideoMAE: Masked Autoencoders are Data-Efficient Learners for Self-Supervised Video Pre-Training](https://arxiv.org/abs/2203.12602) by Zhan Tong, Yibing Song, Jue Wang, Limin Wang.
1. **[ViLT](https://huggingface.co/docs/transformers/model_doc/vilt)** (from NAVER AI Lab/Kakao Enterprise/Kakao Brain) released with the paper [ViLT: Vision-and-Language Transformer Without Convolution or Region Supervision](https://arxiv.org/abs/2102.03334) by Wonjae Kim, Bokyung Son, Ildoo Kim.
1. **[VipLlava](https://huggingface.co/docs/transformers/model_doc/vipllava)** (from University of Wisconsin–Madison) released with the paper [Making Large Multimodal Models Understand Arbitrary Visual Prompts](https://arxiv.org/abs/2312.00784) by Mu Cai, Haotian Liu, Siva Karthik Mustikovela, Gregory P. Meyer, Yuning Chai, Dennis Park, Yong Jae Lee.
1. **[Vision Transformer (ViT)](https://huggingface.co/docs/transformers/model_doc/vit)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby.
1. **[VisualBERT](https://huggingface.co/docs/transformers/model_doc/visual_bert)** (from UCLA NLP) released with the paper [VisualBERT: A Simple and Performant Baseline for Vision and Language](https://arxiv.org/pdf/1908.03557) by Liunian Harold Li, Mark Yatskar, Da Yin, Cho-Jui Hsieh, Kai-Wei Chang.
1. **[ViT Hybrid](https://huggingface.co/docs/transformers/model_doc/vit_hybrid)** (from Google AI) released with the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Alexey Dosovitskiy, Lucas Beyer, Alexander Kolesnikov, Dirk Weissenborn, Xiaohua Zhai, Thomas Unterthiner, Mostafa Dehghani, Matthias Minderer, Georg Heigold, Sylvain Gelly, Jakob Uszkoreit, Neil Houlsby.
1. **[VitDet](https://huggingface.co/docs/transformers/model_doc/vitdet)** (from Meta AI) released with the paper [Exploring Plain Vision Transformer Backbones for Object Detection](https://arxiv.org/abs/2203.16527) by Yanghao Li, Hanzi Mao, Ross Girshick, Kaiming He.
1. **[ViTMAE](https://huggingface.co/docs/transformers/model_doc/vit_mae)** (from Meta AI) released with the paper [Masked Autoencoders Are Scalable Vision Learners](https://arxiv.org/abs/2111.06377) by Kaiming He, Xinlei Chen, Saining Xie, Yanghao Li, Piotr Dollár, Ross Girshick.
1. **[ViTMatte](https://huggingface.co/docs/transformers/model_doc/vitmatte)** (from HUST-VL) released with the paper [ViTMatte: Boosting Image Matting with Pretrained Plain Vision Transformers](https://arxiv.org/abs/2305.15272) by Jingfeng Yao, Xinggang Wang, Shusheng Yang, Baoyuan Wang.
1. **[ViTMSN](https://huggingface.co/docs/transformers/model_doc/vit_msn)** (from Meta AI) released with the paper [Masked Siamese Networks for Label-Efficient Learning](https://arxiv.org/abs/2204.07141) by Mahmoud Assran, Mathilde Caron, Ishan Misra, Piotr Bojanowski, Florian Bordes, Pascal Vincent, Armand Joulin, Michael Rabbat, Nicolas Ballas.
1. **[VITS](https://huggingface.co/docs/transformers/model_doc/vits)** (from Kakao Enterprise) released with the paper [Conditional Variational Autoencoder with Adversarial Learning for End-to-End Text-to-Speech](https://arxiv.org/abs/2106.06103) by Jaehyeon Kim, Jungil Kong, Juhee Son.
1. **[ViViT](https://huggingface.co/docs/transformers/model_doc/vivit)** (from Google Research) released with the paper [ViViT: A Video Vision Transformer](https://arxiv.org/abs/2103.15691) by Anurag Arnab, Mostafa Dehghani, Georg Heigold, Chen Sun, Mario Lučić, Cordelia Schmid.
1. **[Wav2Vec2](https://huggingface.co/docs/transformers/model_doc/wav2vec2)** (from Facebook AI) released with the paper [wav2vec 2.0: A Framework for Self-Supervised Learning of Speech Representations](https://arxiv.org/abs/2006.11477) by Alexei Baevski, Henry Zhou, Abdelrahman Mohamed, Michael Auli.
1. **[Wav2Vec2-BERT](https://huggingface.co/docs/transformers/model_doc/wav2vec2-bert)** (from Meta AI) released with the paper [Seamless: Multilingual Expressive and Streaming Speech Translation](https://ai.meta.com/research/publications/seamless-multilingual-expressive-and-streaming-speech-translation/) by the Seamless Communication team.
1. **[Wav2Vec2-Conformer](https://huggingface.co/docs/transformers/model_doc/wav2vec2-conformer)** (from Facebook AI) released with the paper [FAIRSEQ S2T: Fast Speech-to-Text Modeling with FAIRSEQ](https://arxiv.org/abs/2010.05171) by Changhan Wang, Yun Tang, Xutai Ma, Anne Wu, Sravya Popuri, Dmytro Okhonko, Juan Pino.
\u003ca href=\"https://arxiv.org/abs/2109.11680\" rel=\"nofollow\"\u003eSimple and Effective Zero-shot Cross-lingual Phoneme Recognition\u003c/a\u003e by Qiantong Xu, Alexei Baevski, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/wavlm\" rel=\"nofollow\"\u003eWavLM\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2110.13900\" rel=\"nofollow\"\u003eWavLM: Large-Scale Self-Supervised Pre-Training for Full Stack Speech Processing\u003c/a\u003e by Sanyuan Chen, Chengyi Wang, Zhengyang Chen, Yu Wu, Shujie Liu, Zhuo Chen, Jinyu Li, Naoyuki Kanda, Takuya Yoshioka, Xiong Xiao, Jian Wu, Long Zhou, Shuo Ren, Yanmin Qian, Yao Qian, Jian Wu, Michael Zeng, Furu Wei.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/whisper\" rel=\"nofollow\"\u003eWhisper\u003c/a\u003e\u003c/strong\u003e (from OpenAI) released with the paper \u003ca href=\"https://cdn.openai.com/papers/whisper.pdf\" rel=\"nofollow\"\u003eRobust Speech Recognition via Large-Scale Weak Supervision\u003c/a\u003e by Alec Radford, Jong Wook Kim, Tao Xu, Greg Brockman, Christine McLeavey, Ilya Sutskever.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xclip\" rel=\"nofollow\"\u003eX-CLIP\u003c/a\u003e\u003c/strong\u003e (from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2208.02816\" rel=\"nofollow\"\u003eExpanding Language-Image Pretrained Models for General Video Recognition\u003c/a\u003e by Bolin Ni, Houwen Peng, Minghao Chen, Songyang Zhang, Gaofeng Meng, Jianlong Fu, Shiming Xiang, Haibin Ling.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xmod\" rel=\"nofollow\"\u003eX-MOD\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"http://dx.doi.org/10.18653/v1/2022.naacl-main.255\" rel=\"nofollow\"\u003eLifting the Curse of Multilinguality by Pre-training Modular Transformers\u003c/a\u003e by Jonas Pfeiffer, Naman Goyal, Xi Lin, Xian Li, James Cross, Sebastian Riedel, Mikel Artetxe.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xglm\" rel=\"nofollow\"\u003eXGLM\u003c/a\u003e\u003c/strong\u003e (From Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2112.10668\" rel=\"nofollow\"\u003eFew-shot Learning with Multilingual Language Models\u003c/a\u003e by Xi Victoria Lin, Todor Mihaylov, Mikel Artetxe, Tianlu Wang, Shuohui Chen, Daniel Simig, Myle Ott, Naman Goyal, Shruti Bhosale, Jingfei Du, Ramakanth Pasunuru, Sam Shleifer, Punit Singh Koura, Vishrav Chaudhary, Brian O'Horo, Jeff Wang, Luke Zettlemoyer, Zornitsa Kozareva, Mona Diab, Veselin Stoyanov, Xian Li.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlm\" rel=\"nofollow\"\u003eXLM\u003c/a\u003e\u003c/strong\u003e (from Facebook) released together with the paper \u003ca href=\"https://arxiv.org/abs/1901.07291\" rel=\"nofollow\"\u003eCross-lingual Language Model Pretraining\u003c/a\u003e by Guillaume Lample and Alexis Conneau.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlm-prophetnet\" rel=\"nofollow\"\u003eXLM-ProphetNet\u003c/a\u003e\u003c/strong\u003e 
(from Microsoft Research) released with the paper \u003ca href=\"https://arxiv.org/abs/2001.04063\" rel=\"nofollow\"\u003eProphetNet: Predicting Future N-gram for Sequence-to-Sequence Pre-training\u003c/a\u003e by Yu Yan, Weizhen Qi, Yeyun Gong, Dayiheng Liu, Nan Duan, Jiusheng Chen, Ruofei Zhang and Ming Zhou.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlm-roberta\" rel=\"nofollow\"\u003eXLM-RoBERTa\u003c/a\u003e\u003c/strong\u003e (from Facebook AI), released together with the paper \u003ca href=\"https://arxiv.org/abs/1911.02116\" rel=\"nofollow\"\u003eUnsupervised Cross-lingual Representation Learning at Scale\u003c/a\u003e by Alexis Conneau*, Kartikay Khandelwal*, Naman Goyal, Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlm-roberta-xl\" rel=\"nofollow\"\u003eXLM-RoBERTa-XL\u003c/a\u003e\u003c/strong\u003e (from Facebook AI), released together with the paper \u003ca href=\"https://arxiv.org/abs/2105.00572\" rel=\"nofollow\"\u003eLarger-Scale Transformers for Multilingual Masked Language Modeling\u003c/a\u003e by Naman Goyal, Jingfei Du, Myle Ott, Giri Anantharaman, Alexis Conneau.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlm-v\" rel=\"nofollow\"\u003eXLM-V\u003c/a\u003e\u003c/strong\u003e (from Meta AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2301.10472\" rel=\"nofollow\"\u003eXLM-V: Overcoming the Vocabulary Bottleneck in Multilingual Masked Language Models\u003c/a\u003e by Davis Liang, Hila Gonen, Yuning Mao, Rui Hou, Naman Goyal, Marjan Ghazvininejad, Luke Zettlemoyer, Madian Khabsa.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlnet\" rel=\"nofollow\"\u003eXLNet\u003c/a\u003e\u003c/strong\u003e (from Google/CMU) released with the paper \u003ca href=\"https://arxiv.org/abs/1906.08237\" rel=\"nofollow\"\u003eXLNet: Generalized Autoregressive Pretraining for Language Understanding\u003c/a\u003e by Zhilin Yang*, Zihang Dai*, Yiming Yang, Jaime Carbonell, Ruslan Salakhutdinov, Quoc V. 
Le.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xls_r\" rel=\"nofollow\"\u003eXLS-R\u003c/a\u003e\u003c/strong\u003e (from Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2111.09296\" rel=\"nofollow\"\u003eXLS-R: Self-supervised Cross-lingual Speech Representation Learning at Scale\u003c/a\u003e by Arun Babu, Changhan Wang, Andros Tjandra, Kushal Lakhotia, Qiantong Xu, Naman Goyal, Kritika Singh, Patrick von Platen, Yatharth Saraf, Juan Pino, Alexei Baevski, Alexis Conneau, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/xlsr_wav2vec2\" rel=\"nofollow\"\u003eXLSR-Wav2Vec2\u003c/a\u003e\u003c/strong\u003e (from Facebook AI) released with the paper \u003ca href=\"https://arxiv.org/abs/2006.13979\" rel=\"nofollow\"\u003eUnsupervised Cross-Lingual Representation Learning For Speech Recognition\u003c/a\u003e by Alexis Conneau, Alexei Baevski, Ronan Collobert, Abdelrahman Mohamed, Michael Auli.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/yolos\" rel=\"nofollow\"\u003eYOLOS\u003c/a\u003e\u003c/strong\u003e (from Huazhong University of Science \u0026amp; Technology) released with the paper \u003ca href=\"https://arxiv.org/abs/2106.00666\" rel=\"nofollow\"\u003eYou Only Look at One Sequence: Rethinking Transformer in Vision through Object Detection\u003c/a\u003e by Yuxin Fang, Bencheng Liao, Xinggang Wang, Jiemin Fang, Jiyang Qi, Rui Wu, Jianwei Niu, Wenyu Liu.\u003c/li\u003e\n\u003cli\u003e\u003cstrong\u003e\u003ca href=\"https://huggingface.co/docs/transformers/model_doc/yoso\" rel=\"nofollow\"\u003eYOSO\u003c/a\u003e\u003c/strong\u003e (from the University of Wisconsin - Madison) released with the paper \u003ca href=\"https://arxiv.org/abs/2111.09714\" rel=\"nofollow\"\u003eYou Only Sample (Almost) Once: Linear Cost Self-Attention Via Bernoulli Sampling\u003c/a\u003e by Zhanpeng Zeng, Yunyang Xiong, Sathya N. Ravi, Shailesh Acharya, Glenn Fung, Vikas Singh.\u003c/li\u003e\n\u003cli\u003eWant to contribute a new model? We have added a \u003cstrong\u003edetailed guide and templates\u003c/strong\u003e to guide you in the process of adding a new model. You can find them in the \u003ca href=\"/huggingface/transformers/blob/main/templates\"\u003e\u003ccode\u003etemplates\u003c/code\u003e\u003c/a\u003e folder of the repository. Be sure to check the \u003ca href=\"/huggingface/transformers/blob/main/CONTRIBUTING.md\"\u003econtributing guidelines\u003c/a\u003e and contact the maintainers or open an issue to collect feedback before starting your PR.\u003c/li\u003e\n\u003c/ol\u003e\n\u003cp dir=\"auto\"\u003eTo check if each model has an implementation in Flax, PyTorch or TensorFlow, or has an associated tokenizer backed by the 🤗 Tokenizers library, refer to \u003ca href=\"https://huggingface.co/docs/transformers/index#supported-frameworks\" rel=\"nofollow\"\u003ethis table\u003c/a\u003e.\u003c/p\u003e\n\u003cp dir=\"auto\"\u003eThese implementations have been tested on several datasets (see the example scripts) and should match the performance of the original implementations. 
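As a minimal sketch of how any checkpoint behind the architectures listed above is typically loaded, the snippet below uses the framework-agnostic `AutoTokenizer`/`AutoModel` classes. It assumes network access to the Hugging Face Hub and uses the `roberta-base` checkpoint purely as an example; any other checkpoint name can be substituted.

```python
from transformers import AutoModel, AutoTokenizer

# Example checkpoint only; substitute any checkpoint for the architectures above.
checkpoint = "roberta-base"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModel.from_pretrained(checkpoint)

# True when the tokenizer is backed by the 🤗 Tokenizers (Rust) library.
print(tokenizer.is_fast)

# Encode a sentence and run a forward pass to obtain the hidden states.
inputs = tokenizer("Hello world!", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch_size, sequence_length, hidden_size)
```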
## Learn more

| Section | Description |
|-|-|
| [Documentation](https://huggingface.co/docs/transformers/) | Full API documentation and tutorials |
| [Task summary](https://huggingface.co/docs/transformers/task_summary) | Tasks supported by 🤗 Transformers |
| [Preprocessing tutorial](https://huggingface.co/docs/transformers/preprocessing) | Using the `Tokenizer` class to prepare data for the models |
| [Training and fine-tuning](https://huggingface.co/docs/transformers/training) | Using the models provided by 🤗 Transformers in a PyTorch/TensorFlow training loop and the `Trainer` API |
| [Quick tour: Fine-tuning/usage scripts](https://github.com/huggingface/transformers/tree/main/examples) | Example scripts for fine-tuning models on a wide range of tasks |
| [Model sharing and uploading](https://huggingface.co/docs/transformers/model_sharing) | Upload and share your fine-tuned models with the community |
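The preprocessing and training rows above come down to two objects: a tokenizer that turns raw text into model-ready tensors, and the `Trainer` API that wraps a standard training loop. The following is a minimal sketch under simplifying assumptions: PyTorch is installed, the `distilbert-base-uncased` checkpoint is used only as an example, and a two-sentence toy list stands in for a real dataset.

```python
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

checkpoint = "distilbert-base-uncased"  # example checkpoint only
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint, num_labels=2)

# Preprocessing: pad/truncate raw text into input_ids and attention_mask.
texts = ["I love this!", "This is terrible."]
labels = [1, 0]
encodings = tokenizer(texts, padding=True, truncation=True)
train_dataset = [
    {**{key: values[i] for key, values in encodings.items()}, "label": labels[i]}
    for i in range(len(texts))
]

# Training: Trainer handles batching, the optimizer and checkpointing.
args = TrainingArguments(output_dir="out", num_train_epochs=1, per_device_train_batch_size=2)
trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
trainer.train()
```

For real workloads the toy list would be replaced by a proper dataset, and the example scripts linked above already wire this up end to end.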
aria-hidden=\"true\"\u003e\u003cpath d=\"m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z\"\u003e\u003c/path\u003e\u003c/svg\u003e\u003c/a\u003e\u003c/div\u003e\n\u003cp dir=\"auto\"\u003eWe now have a \u003ca href=\"https://www.aclweb.org/anthology/2020.emnlp-demos.6/\" rel=\"nofollow\"\u003epaper\u003c/a\u003e you can cite for the 🤗 Transformers library:\u003c/p\u003e\n\u003cdiv class=\"highlight highlight-text-bibtex notranslate position-relative overflow-auto\" dir=\"auto\" data-snippet-clipboard-copy-content=\"@inproceedings{wolf-etal-2020-transformers,\n title = \u0026quot;Transformers: State-of-the-Art Natural Language Processing\u0026quot;,\n author = \u0026quot;Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. Rush\u0026quot;,\n booktitle = \u0026quot;Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations\u0026quot;,\n month = oct,\n year = \u0026quot;2020\u0026quot;,\n address = \u0026quot;Online\u0026quot;,\n publisher = \u0026quot;Association for Computational Linguistics\u0026quot;,\n url = \u0026quot;https://www.aclweb.org/anthology/2020.emnlp-demos.6\u0026quot;,\n pages = \u0026quot;38--45\u0026quot;\n}\"\u003e\u003cpre\u003e\u003cspan class=\"pl-k\"\u003e@inproceedings\u003c/span\u003e{\u003cspan class=\"pl-en\"\u003ewolf-etal-2020-transformers\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003etitle\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eTransformers: State-of-the-Art Natural Language Processing\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003eauthor\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eThomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. 
Rush\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003ebooktitle\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eProceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003emonth\u003c/span\u003e = oct,\n \u003cspan class=\"pl-s\"\u003eyear\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e2020\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003eaddress\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eOnline\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003epublisher\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003eAssociation for Computational Linguistics\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003eurl\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003ehttps://www.aclweb.org/anthology/2020.emnlp-demos.6\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e,\n \u003cspan class=\"pl-s\"\u003epages\u003c/span\u003e = \u003cspan class=\"pl-s\"\u003e\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e38--45\u003cspan class=\"pl-pds\"\u003e\"\u003c/span\u003e\u003c/span\u003e\n}\u003c/pre\u003e\u003c/div\u003e\n\u003c/article\u003e","loaded":true,"timedOut":false,"errorMessage":null,"headerInfo":{"toc":[{"level":4,"text":"English |\n 简体中文 |\n 繁體中文 |\n 한국어 |\n Español |\n 日本語 |\n हिन्दी |\n Русский |\n Рortuguês |\n తెలుగు |\n Français |\n Deutsch |\n Tiếng Việt |\n ","anchor":"------------english---------简体中文---------繁體中文---------한국어---------español---------日本語---------हिन्दी---------русский---------рortuguês---------తెలుగు---------français---------deutsch---------tiếng-việt-----","htmlText":"English |\n 简体中文 |\n 繁體中文 |\n 한국어 |\n Español |\n 日本語 |\n हिन्दी |\n Русский |\n Рortuguês |\n తెలుగు |\n Français |\n Deutsch |\n Tiếng Việt |\n "},{"level":3,"text":"State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow","anchor":"----state-of-the-art-machine-learning-for-jax-pytorch-and-tensorflow","htmlText":"State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"},{"level":3,"text":"","anchor":"----","htmlText":""},{"level":2,"text":"Online demos","anchor":"online-demos","htmlText":"Online demos"},{"level":2,"text":"100 projects using Transformers","anchor":"100-projects-using-transformers","htmlText":"100 projects using Transformers"},{"level":2,"text":"If you are looking for custom support from the Hugging Face team","anchor":"if-you-are-looking-for-custom-support-from-the-hugging-face-team","htmlText":"If you are looking for custom support from the Hugging Face team"},{"level":2,"text":"Quick tour","anchor":"quick-tour","htmlText":"Quick tour"},{"level":3,"text":"","anchor":"--------","htmlText":""},{"level":2,"text":"Why should I use transformers?","anchor":"why-should-i-use-transformers","htmlText":"Why should I use transformers?"},{"level":2,"text":"Why shouldn't I use transformers?","anchor":"why-shouldnt-i-use-transformers","htmlText":"Why shouldn't I use 
transformers?"},{"level":2,"text":"Installation","anchor":"installation","htmlText":"Installation"},{"level":3,"text":"With pip","anchor":"with-pip","htmlText":"With pip"},{"level":3,"text":"With conda","anchor":"with-conda","htmlText":"With conda"},{"level":2,"text":"Model architectures","anchor":"model-architectures","htmlText":"Model architectures"},{"level":2,"text":"Learn more","anchor":"learn-more","htmlText":"Learn more"},{"level":2,"text":"Citation","anchor":"citation","htmlText":"Citation"}],"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers"}},{"displayName":"CODE_OF_CONDUCT.md","repoName":"transformers","refName":"main","path":"CODE_OF_CONDUCT.md","preferredFileType":"code_of_conduct","tabName":"Code of conduct","richText":null,"loaded":false,"timedOut":false,"errorMessage":null,"headerInfo":{"toc":null,"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers"}},{"displayName":"LICENSE","repoName":"transformers","refName":"main","path":"LICENSE","preferredFileType":"license","tabName":"Apache-2.0","richText":null,"loaded":false,"timedOut":false,"errorMessage":null,"headerInfo":{"toc":null,"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers"}},{"displayName":"SECURITY.md","repoName":"transformers","refName":"main","path":"SECURITY.md","preferredFileType":"security","tabName":"Security","richText":null,"loaded":false,"timedOut":false,"errorMessage":null,"headerInfo":{"toc":null,"siteNavLoginPath":"/login?return_to=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers"}}],"overviewFilesProcessingTime":38.962792}},"appPayload":{"helpUrl":"https://docs.github.com","findFileWorkerPath":"/assets-cdn/worker/find-file-worker-a007d7f370d6.js","findInFileWorkerPath":"/assets-cdn/worker/find-in-file-worker-d0f0ff069004.js","githubDevUrl":null,"enabled_features":{"code_nav_ui_events":false,"copilot_conversational_ux":false,"react_blob_overlay":false,"copilot_conversational_ux_embedding_update":false,"copilot_popover_file_editor_header":false,"copilot_smell_icebreaker_ux":true,"copilot_workspace":false,"overview_async_data_channel":false}}}}</script>
- <div data-target="react-partial.reactRoot"></div>
-</react-partial>
-
- <input type="hidden" data-csrf="true" value="h9JzF6nM3j1tCzNHf8P5t0b3htdxNM5MXqPykVVT5VBNv8zM9gGvghmh89+8gjVFe8m6DoGyDe1g4ytkm2R8tg==" />
-</div>
- <div data-view-component="true" class="Layout-sidebar">
-
- <div class="BorderGrid about-margin" data-pjax>
- <div class="BorderGrid-row">
- <div class="BorderGrid-cell">
- <div class="hide-sm hide-md">
- <h2 class="mb-3 h4">About</h2>
-
- <p class="f4 my-3">
- 🤗 Transformers: State-of-the-art Machine Learning for Pytorch, TensorFlow, and JAX.
- </p>
- <div class="my-3 d-flex flex-items-center">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-link flex-shrink-0 mr-2">
- <path d="m7.775 3.275 1.25-1.25a3.5 3.5 0 1 1 4.95 4.95l-2.5 2.5a3.5 3.5 0 0 1-4.95 0 .751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018 1.998 1.998 0 0 0 2.83 0l2.5-2.5a2.002 2.002 0 0 0-2.83-2.83l-1.25 1.25a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042Zm-4.69 9.64a1.998 1.998 0 0 0 2.83 0l1.25-1.25a.751.751 0 0 1 1.042.018.751.751 0 0 1 .018 1.042l-1.25 1.25a3.5 3.5 0 1 1-4.95-4.95l2.5-2.5a3.5 3.5 0 0 1 4.95 0 .751.751 0 0 1-.018 1.042.751.751 0 0 1-1.042.018 1.998 1.998 0 0 0-2.83 0l-2.5 2.5a1.998 1.998 0 0 0 0 2.83Z"></path>
-</svg>
- <span class="flex-auto min-width-0 css-truncate css-truncate-target width-fit">
- <a title="https://huggingface.co/transformers" role="link" target="_blank" rel="noopener noreferrer nofollow" class="text-bold" href="https://huggingface.co/transformers">huggingface.co/transformers</a>
- </span>
- </div>
-
- <h3 class="sr-only">Topics</h3>
- <div class="my-3">
- <div class="f6">
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:python" href="/topics/python" title="Topic: python" data-view-component="true" class="topic-tag topic-tag-link">
- python
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:nlp" href="/topics/nlp" title="Topic: nlp" data-view-component="true" class="topic-tag topic-tag-link">
- nlp
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:machine-learning" href="/topics/machine-learning" title="Topic: machine-learning" data-view-component="true" class="topic-tag topic-tag-link">
- machine-learning
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:natural-language-processing" href="/topics/natural-language-processing" title="Topic: natural-language-processing" data-view-component="true" class="topic-tag topic-tag-link">
- natural-language-processing
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:deep-learning" href="/topics/deep-learning" title="Topic: deep-learning" data-view-component="true" class="topic-tag topic-tag-link">
- deep-learning
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:tensorflow" href="/topics/tensorflow" title="Topic: tensorflow" data-view-component="true" class="topic-tag topic-tag-link">
- tensorflow
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:pytorch" href="/topics/pytorch" title="Topic: pytorch" data-view-component="true" class="topic-tag topic-tag-link">
- pytorch
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:transformer" href="/topics/transformer" title="Topic: transformer" data-view-component="true" class="topic-tag topic-tag-link">
- transformer
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:speech-recognition" href="/topics/speech-recognition" title="Topic: speech-recognition" data-view-component="true" class="topic-tag topic-tag-link">
- speech-recognition
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:seq2seq" href="/topics/seq2seq" title="Topic: seq2seq" data-view-component="true" class="topic-tag topic-tag-link">
- seq2seq
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:flax" href="/topics/flax" title="Topic: flax" data-view-component="true" class="topic-tag topic-tag-link">
- flax
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:pretrained-models" href="/topics/pretrained-models" title="Topic: pretrained-models" data-view-component="true" class="topic-tag topic-tag-link">
- pretrained-models
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:language-models" href="/topics/language-models" title="Topic: language-models" data-view-component="true" class="topic-tag topic-tag-link">
- language-models
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:nlp-library" href="/topics/nlp-library" title="Topic: nlp-library" data-view-component="true" class="topic-tag topic-tag-link">
- nlp-library
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:language-model" href="/topics/language-model" title="Topic: language-model" data-view-component="true" class="topic-tag topic-tag-link">
- language-model
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:hacktoberfest" href="/topics/hacktoberfest" title="Topic: hacktoberfest" data-view-component="true" class="topic-tag topic-tag-link">
- hacktoberfest
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:bert" href="/topics/bert" title="Topic: bert" data-view-component="true" class="topic-tag topic-tag-link">
- bert
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:jax" href="/topics/jax" title="Topic: jax" data-view-component="true" class="topic-tag topic-tag-link">
- jax
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:pytorch-transformers" href="/topics/pytorch-transformers" title="Topic: pytorch-transformers" data-view-component="true" class="topic-tag topic-tag-link">
- pytorch-transformers
-</a>
- <a data-ga-click="Topic, repository page" data-octo-click="topic_click" data-octo-dimensions="topic:model-hub" href="/topics/model-hub" title="Topic: model-hub" data-view-component="true" class="topic-tag topic-tag-link">
- model-hub
-</a>
- </div>
-
- </div>
-
- <h3 class="sr-only">Resources</h3>
- <div class="mt-2">
- <a class="Link--muted" data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:readme&quot;}" href="#readme-ov-file">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-book mr-2">
- <path d="M0 1.75A.75.75 0 0 1 .75 1h4.253c1.227 0 2.317.59 3 1.501A3.743 3.743 0 0 1 11.006 1h4.245a.75.75 0 0 1 .75.75v10.5a.75.75 0 0 1-.75.75h-4.507a2.25 2.25 0 0 0-1.591.659l-.622.621a.75.75 0 0 1-1.06 0l-.622-.621A2.25 2.25 0 0 0 5.258 13H.75a.75.75 0 0 1-.75-.75Zm7.251 10.324.004-5.073-.002-2.253A2.25 2.25 0 0 0 5.003 2.5H1.5v9h3.757a3.75 3.75 0 0 1 1.994.574ZM8.755 4.75l-.004 7.322a3.752 3.752 0 0 1 1.992-.572H14.5v-9h-3.495a2.25 2.25 0 0 0-2.25 2.25Z"></path>
-</svg>
- Readme
-</a> </div>
-
-
- <h3 class="sr-only">License</h3>
- <div class="mt-2">
- <a href="#Apache-2.0-1-ov-file"
- class="Link--muted"
-
- data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:license&quot;}"
- >
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-law mr-2">
- <path d="M8.75.75V2h.985c.304 0 .603.08.867.231l1.29.736c.038.022.08.033.124.033h2.234a.75.75 0 0 1 0 1.5h-.427l2.111 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.006.005-.01.01-.045.04c-.21.176-.441.327-.686.45C14.556 10.78 13.88 11 13 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L12.178 4.5h-.162c-.305 0-.604-.079-.868-.231l-1.29-.736a.245.245 0 0 0-.124-.033H8.75V13h2.5a.75.75 0 0 1 0 1.5h-6.5a.75.75 0 0 1 0-1.5h2.5V3.5h-.984a.245.245 0 0 0-.124.033l-1.289.737c-.265.15-.564.23-.869.23h-.162l2.112 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.016.015-.045.04c-.21.176-.441.327-.686.45C4.556 10.78 3.88 11 3 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L2.178 4.5H1.75a.75.75 0 0 1 0-1.5h2.234a.249.249 0 0 0 .125-.033l1.288-.737c.265-.15.564-.23.869-.23h.984V.75a.75.75 0 0 1 1.5 0Zm2.945 8.477c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L13 6.327Zm-10 0c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L3 6.327Z"></path>
-</svg>
- Apache-2.0 license
- </a>
- </div>
-
-
- <h3 class="sr-only">Code of conduct</h3>
- <div class="mt-2">
- <a href="#coc-ov-file"
- class="Link--muted"
-
- data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:code of conduct&quot;}"
- >
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-code-of-conduct mr-2">
- <path d="M8.048 2.241c.964-.709 2.079-1.238 3.325-1.241a4.616 4.616 0 0 1 3.282 1.355c.41.408.757.86.996 1.428.238.568.348 1.206.347 1.968 0 2.193-1.505 4.254-3.081 5.862-1.496 1.526-3.213 2.796-4.249 3.563l-.22.163a.749.749 0 0 1-.895 0l-.221-.163c-1.036-.767-2.753-2.037-4.249-3.563C1.51 10.008.007 7.952.002 5.762a4.614 4.614 0 0 1 1.353-3.407C3.123.585 6.223.537 8.048 2.24Zm-1.153.983c-1.25-1.033-3.321-.967-4.48.191a3.115 3.115 0 0 0-.913 2.335c0 1.556 1.109 3.24 2.652 4.813C5.463 11.898 6.96 13.032 8 13.805c.353-.262.758-.565 1.191-.905l-1.326-1.223a.75.75 0 0 1 1.018-1.102l1.48 1.366c.328-.281.659-.577.984-.887L9.99 9.802a.75.75 0 1 1 1.019-1.103l1.384 1.28c.295-.329.566-.661.81-.995L12.92 8.7l-1.167-1.168c-.674-.671-1.78-.664-2.474.03-.268.269-.538.537-.802.797-.893.882-2.319.843-3.185-.032-.346-.35-.693-.697-1.043-1.047a.75.75 0 0 1-.04-1.016c.162-.191.336-.401.52-.623.62-.748 1.356-1.637 2.166-2.417Zm7.112 4.442c.313-.65.491-1.293.491-1.916v-.001c0-.614-.088-1.045-.23-1.385-.143-.339-.357-.633-.673-.949a3.111 3.111 0 0 0-2.218-.915c-1.092.003-2.165.627-3.226 1.602-.823.755-1.554 1.637-2.228 2.45l-.127.154.562.566a.755.755 0 0 0 1.066.02l.794-.79c1.258-1.258 3.312-1.31 4.594-.032.396.394.792.791 1.173 1.173Z"></path>
-</svg>
- Code of conduct
- </a>
- </div>
-
- <h3 class="sr-only">Security policy</h3>
- <div class="mt-2">
- <a href="#security-ov-file"
- class="Link--muted"
-
- data-analytics-event="{&quot;category&quot;:&quot;Repository Overview&quot;,&quot;action&quot;:&quot;click&quot;,&quot;label&quot;:&quot;location:sidebar;file:security policy&quot;}"
- >
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-law mr-2">
- <path d="M8.75.75V2h.985c.304 0 .603.08.867.231l1.29.736c.038.022.08.033.124.033h2.234a.75.75 0 0 1 0 1.5h-.427l2.111 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.006.005-.01.01-.045.04c-.21.176-.441.327-.686.45C14.556 10.78 13.88 11 13 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L12.178 4.5h-.162c-.305 0-.604-.079-.868-.231l-1.29-.736a.245.245 0 0 0-.124-.033H8.75V13h2.5a.75.75 0 0 1 0 1.5h-6.5a.75.75 0 0 1 0-1.5h2.5V3.5h-.984a.245.245 0 0 0-.124.033l-1.289.737c-.265.15-.564.23-.869.23h-.162l2.112 4.692a.75.75 0 0 1-.154.838l-.53-.53.529.531-.001.002-.002.002-.006.006-.016.015-.045.04c-.21.176-.441.327-.686.45C4.556 10.78 3.88 11 3 11a4.498 4.498 0 0 1-2.023-.454 3.544 3.544 0 0 1-.686-.45l-.045-.04-.016-.015-.006-.006-.004-.004v-.001a.75.75 0 0 1-.154-.838L2.178 4.5H1.75a.75.75 0 0 1 0-1.5h2.234a.249.249 0 0 0 .125-.033l1.288-.737c.265-.15.564-.23.869-.23h.984V.75a.75.75 0 0 1 1.5 0Zm2.945 8.477c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L13 6.327Zm-10 0c.285.135.718.273 1.305.273s1.02-.138 1.305-.273L3 6.327Z"></path>
-</svg>
- Security policy
- </a>
- </div>
-
- <include-fragment src="/huggingface/transformers/hovercards/citation/sidebar_partial?tree_name=main">
- </include-fragment>
-
- <div class="mt-2">
- <a href="/huggingface/transformers/activity" data-view-component="true" class="Link Link--muted">
- <svg text="gray" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-pulse mr-2">
- <path d="M6 2c.306 0 .582.187.696.471L10 10.731l1.304-3.26A.751.751 0 0 1 12 7h3.25a.75.75 0 0 1 0 1.5h-2.742l-1.812 4.528a.751.751 0 0 1-1.392 0L6 4.77 4.696 8.03A.75.75 0 0 1 4 8.5H.75a.75.75 0 0 1 0-1.5h2.742l1.812-4.529A.751.751 0 0 1 6 2Z"></path>
-</svg>
- <span class="color-fg-muted">Activity</span>
-</a> </div>
-
- <div class="mt-2">
- <a href="/huggingface/transformers/custom-properties" data-view-component="true" class="Link Link--muted">
- <svg text="gray" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-note mr-2">
- <path d="M0 3.75C0 2.784.784 2 1.75 2h12.5c.966 0 1.75.784 1.75 1.75v8.5A1.75 1.75 0 0 1 14.25 14H1.75A1.75 1.75 0 0 1 0 12.25Zm1.75-.25a.25.25 0 0 0-.25.25v8.5c0 .138.112.25.25.25h12.5a.25.25 0 0 0 .25-.25v-8.5a.25.25 0 0 0-.25-.25ZM3.5 6.25a.75.75 0 0 1 .75-.75h7a.75.75 0 0 1 0 1.5h-7a.75.75 0 0 1-.75-.75Zm.75 2.25h4a.75.75 0 0 1 0 1.5h-4a.75.75 0 0 1 0-1.5Z"></path>
-</svg>
- <span class="color-fg-muted">Custom properties</span>
-</a> </div>
-
- <h3 class="sr-only">Stars</h3>
- <div class="mt-2">
- <a href="/huggingface/transformers/stargazers" data-view-component="true" class="Link Link--muted">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-star mr-2">
- <path d="M8 .25a.75.75 0 0 1 .673.418l1.882 3.815 4.21.612a.75.75 0 0 1 .416 1.279l-3.046 2.97.719 4.192a.751.751 0 0 1-1.088.791L8 12.347l-3.766 1.98a.75.75 0 0 1-1.088-.79l.72-4.194L.818 6.374a.75.75 0 0 1 .416-1.28l4.21-.611L7.327.668A.75.75 0 0 1 8 .25Zm0 2.445L6.615 5.5a.75.75 0 0 1-.564.41l-3.097.45 2.24 2.184a.75.75 0 0 1 .216.664l-.528 3.084 2.769-1.456a.75.75 0 0 1 .698 0l2.77 1.456-.53-3.084a.75.75 0 0 1 .216-.664l2.24-2.183-3.096-.45a.75.75 0 0 1-.564-.41L8 2.694Z"></path>
-</svg>
- <strong>123k</strong>
- stars
-</a> </div>
-
- <h3 class="sr-only">Watchers</h3>
- <div class="mt-2">
- <a href="/huggingface/transformers/watchers" data-view-component="true" class="Link Link--muted">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-eye mr-2">
- <path d="M8 2c1.981 0 3.671.992 4.933 2.078 1.27 1.091 2.187 2.345 2.637 3.023a1.62 1.62 0 0 1 0 1.798c-.45.678-1.367 1.932-2.637 3.023C11.67 13.008 9.981 14 8 14c-1.981 0-3.671-.992-4.933-2.078C1.797 10.83.88 9.576.43 8.898a1.62 1.62 0 0 1 0-1.798c.45-.677 1.367-1.931 2.637-3.022C4.33 2.992 6.019 2 8 2ZM1.679 7.932a.12.12 0 0 0 0 .136c.411.622 1.241 1.75 2.366 2.717C5.176 11.758 6.527 12.5 8 12.5c1.473 0 2.825-.742 3.955-1.715 1.124-.967 1.954-2.096 2.366-2.717a.12.12 0 0 0 0-.136c-.412-.621-1.242-1.75-2.366-2.717C10.824 4.242 9.473 3.5 8 3.5c-1.473 0-2.825.742-3.955 1.715-1.124.967-1.954 2.096-2.366 2.717ZM8 10a2 2 0 1 1-.001-3.999A2 2 0 0 1 8 10Z"></path>
-</svg>
- <strong>1.1k</strong>
- watching
-</a> </div>
-
- <h3 class="sr-only">Forks</h3>
- <div class="mt-2">
- <a href="/huggingface/transformers/forks" data-view-component="true" class="Link Link--muted">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-repo-forked mr-2">
- <path d="M5 5.372v.878c0 .414.336.75.75.75h4.5a.75.75 0 0 0 .75-.75v-.878a2.25 2.25 0 1 1 1.5 0v.878a2.25 2.25 0 0 1-2.25 2.25h-1.5v2.128a2.251 2.251 0 1 1-1.5 0V8.5h-1.5A2.25 2.25 0 0 1 3.5 6.25v-.878a2.25 2.25 0 1 1 1.5 0ZM5 3.25a.75.75 0 1 0-1.5 0 .75.75 0 0 0 1.5 0Zm6.75.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5Zm-3 8.75a.75.75 0 1 0-1.5 0 .75.75 0 0 0 1.5 0Z"></path>
-</svg>
- <strong>24.5k</strong>
- forks
-</a> </div>
-
- <div class="mt-2">
- <a class="Link--muted" href="/contact/report-content?content_url=https%3A%2F%2Fgithub.com%2Fhuggingface%2Ftransformers&amp;report=huggingface+%28user%29">
- Report repository
-</a> </div>
-</div>
-
- </div>
- </div>
-
-
- <div class="BorderGrid-row">
- <div class="BorderGrid-cell">
- <h2 class="h4 mb-3" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame">
- <a href="/huggingface/transformers/releases" data-view-component="true" class="Link--primary no-underline Link">
- Releases
- <span title="145" data-view-component="true" class="Counter">145</span>
-</a></h2>
-
- <a class="Link--primary d-flex no-underline" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" href="/huggingface/transformers/releases/tag/v4.39.2">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-tag flex-shrink-0 mt-1 color-fg-success">
- <path d="M1 7.775V2.75C1 1.784 1.784 1 2.75 1h5.025c.464 0 .91.184 1.238.513l6.25 6.25a1.75 1.75 0 0 1 0 2.474l-5.026 5.026a1.75 1.75 0 0 1-2.474 0l-6.25-6.25A1.752 1.752 0 0 1 1 7.775Zm1.5 0c0 .066.026.13.073.177l6.25 6.25a.25.25 0 0 0 .354 0l5.025-5.025a.25.25 0 0 0 0-.354l-6.25-6.25a.25.25 0 0 0-.177-.073H2.75a.25.25 0 0 0-.25.25ZM6 5a1 1 0 1 1 0 2 1 1 0 0 1 0-2Z"></path>
-</svg>
- <div class="ml-2 min-width-0">
- <div class="d-flex">
- <span class="css-truncate css-truncate-target text-bold mr-2" style="max-width: none;">Patch release v4.39.2</span>
- <span title="Label: Latest" data-view-component="true" class="Label Label--success flex-shrink-0">
- Latest
-</span> </div>
- <div class="text-small color-fg-muted"><relative-time datetime="2024-03-28T17:36:27Z" class="no-wrap">Mar 28, 2024</relative-time></div>
- </div>
-</a> <div data-view-component="true" class="mt-3">
- <a text="small" data-pjax="#repo-content-pjax-container" data-turbo-frame="repo-content-turbo-frame" href="/huggingface/transformers/releases" data-view-component="true" class="Link">
- + 144 releases
-</a></div>
- </div>
- </div>
-
-
-
- <div class="BorderGrid-row">
- <div class="BorderGrid-cell">
- <h2 class="h4 mb-3">
- <a href="/orgs/huggingface/packages?repo_name=transformers" data-view-component="true" class="Link--primary no-underline Link d-flex flex-items-center">
- Packages
- <span title="0" hidden="hidden" data-view-component="true" class="Counter ml-1">0</span>
-</a></h2>
-
-
- <div class="text-small color-fg-muted">
- No packages published <br>
- </div>
-
-
-
- </div>
- </div>
-
-
- <div class="BorderGrid-row" hidden>
- <div class="BorderGrid-cell">
- <include-fragment src="/huggingface/transformers/used_by_list" accept="text/fragment+html">
-</include-fragment>
- </div>
- </div>
-
-
- <div class="BorderGrid-row">
- <div class="BorderGrid-cell">
- <h2 class="h4 mb-3">
- <a href="/huggingface/transformers/graphs/contributors" data-view-component="true" class="Link--primary no-underline Link d-flex flex-items-center">
- Contributors
- <span title="2,419" data-view-component="true" class="Counter ml-1">2,419</span>
-</a></h2>
-
-
-
- <ul class="list-style-none d-flex flex-wrap mb-n2">
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/sgugger"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/sgugger/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/35901082?s=64&amp;v=4" alt="@sgugger" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/thomwolf"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/thomwolf/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/7353373?s=64&amp;v=4" alt="@thomwolf" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/LysandreJik"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/LysandreJik/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/30755778?s=64&amp;v=4" alt="@LysandreJik" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/ydshieh"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/ydshieh/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/2521628?s=64&amp;v=4" alt="@ydshieh" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/patrickvonplaten"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/patrickvonplaten/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/23423619?s=64&amp;v=4" alt="@patrickvonplaten" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/stas00"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/stas00/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/10676103?s=64&amp;v=4" alt="@stas00" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/julien-c"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/julien-c/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/326577?s=64&amp;v=4" alt="@julien-c" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/NielsRogge"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/NielsRogge/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/48327001?s=64&amp;v=4" alt="@NielsRogge" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/ArthurZucker"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/ArthurZucker/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/48595927?s=64&amp;v=4" alt="@ArthurZucker" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/gante"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/gante/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/12240844?s=64&amp;v=4" alt="@gante" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/amyeroberts"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/amyeroberts/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/22614925?s=64&amp;v=4" alt="@amyeroberts" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/Rocketknight1"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/Rocketknight1/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/12866554?s=64&amp;v=4" alt="@Rocketknight1" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/younesbelkada"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/younesbelkada/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/49240599?s=64&amp;v=4" alt="@younesbelkada" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
- <li class="mb-2 mr-2"
- >
- <a href="https://github.com/patil-suraj"
- class=""
- data-hovercard-type="user" data-hovercard-url="/users/patil-suraj/hovercard" data-octo-click="hovercard-link-click" data-octo-dimensions="link_type:self"
-
- >
- <img src="https://avatars.githubusercontent.com/u/27137566?s=64&amp;v=4" alt="@patil-suraj" size="32" height="32" width="32" data-view-component="true" class="avatar circle" />
- </a>
- </li>
-</ul>
-
-
-
-
- <div data-view-component="true" class="mt-3">
- <a text="small" href="/huggingface/transformers/graphs/contributors" data-view-component="true" class="Link--inTextBlock Link">
- + 2,405 contributors
-</a></div>
- </div>
- </div>
-
-
-
- <div class="BorderGrid-row">
- <div class="BorderGrid-cell">
- <h2 class="h4 mb-3">Languages</h2>
-<div class="mb-2">
- <span data-view-component="true" class="Progress">
- <span style="background-color:#3572A5 !important;;width: 99.3%;" itemprop="keywords" aria-label="Python 99.3" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
- <span style="background-color:#3A4E3A !important;;width: 0.6%;" itemprop="keywords" aria-label="Cuda 0.6" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
- <span style="background-color:#89e051 !important;;width: 0.1%;" itemprop="keywords" aria-label="Shell 0.1" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
- <span style="background-color:#f34b7d !important;;width: 0.0%;" itemprop="keywords" aria-label="C++ 0.0" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
- <span style="background-color:#384d54 !important;;width: 0.0%;" itemprop="keywords" aria-label="Dockerfile 0.0" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
- <span style="background-color:#555555 !important;;width: 0.0%;" itemprop="keywords" aria-label="C 0.0" data-view-component="true" class="Progress-item color-bg-success-emphasis"></span>
-</span></div>
-<ul class="list-style-none">
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=python" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#3572A5;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">Python</span>
- <span>99.3%</span>
- </a>
- </li>
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=cuda" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#3A4E3A;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">Cuda</span>
- <span>0.6%</span>
- </a>
- </li>
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=shell" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#89e051;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">Shell</span>
- <span>0.1%</span>
- </a>
- </li>
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=c%2B%2B" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#f34b7d;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">C++</span>
- <span>0.0%</span>
- </a>
- </li>
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=dockerfile" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#384d54;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">Dockerfile</span>
- <span>0.0%</span>
- </a>
- </li>
- <li class="d-inline">
- <a class="d-inline-flex flex-items-center flex-nowrap Link--secondary no-underline text-small mr-3" href="/huggingface/transformers/search?l=c" data-ga-click="Repository, language stats search click, location:repo overview">
- <svg style="color:#555555;" aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-dot-fill mr-2">
- <path d="M8 4a4 4 0 1 1 0 8 4 4 0 0 1 0-8Z"></path>
-</svg>
- <span class="color-fg-default text-bold mr-1">C</span>
- <span>0.0%</span>
- </a>
- </li>
-</ul>
-
- </div>
- </div>
-
- </div>
-</div>
-
-</div></div>
-
- </div>
-
-
- </div>
-
-</turbo-frame>
-
-
- </main>
- </div>
-
- </div>
-
- <footer class="footer pt-8 pb-6 f6 color-fg-muted p-responsive" role="contentinfo" >
- <h2 class='sr-only'>Footer</h2>
-
-
-
-
- <div class="d-flex flex-justify-center flex-items-center flex-column-reverse flex-lg-row flex-wrap flex-lg-nowrap">
- <div class="d-flex flex-items-center flex-shrink-0 mx-2">
- <a aria-label="Homepage" title="GitHub" class="footer-octicon mr-2" href="https://github.com">
- <svg aria-hidden="true" height="24" viewBox="0 0 16 16" version="1.1" width="24" data-view-component="true" class="octicon octicon-mark-github">
- <path d="M8 0c4.42 0 8 3.58 8 8a8.013 8.013 0 0 1-5.45 7.59c-.4.08-.55-.17-.55-.38 0-.27.01-1.13.01-2.2 0-.75-.25-1.23-.54-1.48 1.78-.2 3.65-.88 3.65-3.95 0-.88-.31-1.59-.82-2.15.08-.2.36-1.02-.08-2.12 0 0-.67-.22-2.2.82-.64-.18-1.32-.27-2-.27-.68 0-1.36.09-2 .27-1.53-1.03-2.2-.82-2.2-.82-.44 1.1-.16 1.92-.08 2.12-.51.56-.82 1.28-.82 2.15 0 3.06 1.86 3.75 3.64 3.95-.23.2-.44.55-.51 1.07-.46.21-1.61.55-2.33-.66-.15-.24-.6-.83-1.23-.82-.67.01-.27.38.01.53.34.19.73.9.82 1.13.16.45.68 1.31 2.69.94 0 .67.01 1.3.01 1.49 0 .21-.15.45-.55.38A7.995 7.995 0 0 1 0 8c0-4.42 3.58-8 8-8Z"></path>
-</svg>
-</a>
- <span>
- &copy; 2024 GitHub,&nbsp;Inc.
- </span>
- </div>
-
- <nav aria-label="Footer">
- <h3 class="sr-only" id="sr-footer-heading">Footer navigation</h3>
-
- <ul class="list-style-none d-flex flex-justify-center flex-wrap mb-2 mb-lg-0" aria-labelledby="sr-footer-heading">
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to Terms&quot;,&quot;label&quot;:&quot;text:terms&quot;}" href="https://docs.github.com/site-policy/github-terms/github-terms-of-service" data-view-component="true" class="Link--secondary Link">Terms</a>
- </li>
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to privacy&quot;,&quot;label&quot;:&quot;text:privacy&quot;}" href="https://docs.github.com/site-policy/privacy-policies/github-privacy-statement" data-view-component="true" class="Link--secondary Link">Privacy</a>
- </li>
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to security&quot;,&quot;label&quot;:&quot;text:security&quot;}" href="/security" data-view-component="true" class="Link--secondary Link">Security</a>
- </li>
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to status&quot;,&quot;label&quot;:&quot;text:status&quot;}" href="https://www.githubstatus.com/" data-view-component="true" class="Link--secondary Link">Status</a>
- </li>
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to docs&quot;,&quot;label&quot;:&quot;text:docs&quot;}" href="https://docs.github.com/" data-view-component="true" class="Link--secondary Link">Docs</a>
- </li>
-
- <li class="mx-2">
- <a data-analytics-event="{&quot;category&quot;:&quot;Footer&quot;,&quot;action&quot;:&quot;go to contact&quot;,&quot;label&quot;:&quot;text:contact&quot;}" href="https://support.github.com?tags=dotcom-footer" data-view-component="true" class="Link--secondary Link">Contact</a>
- </li>
-
- <li class="mx-2" >
- <cookie-consent-link>
- <button type="button" class="Link--secondary underline-on-hover border-0 p-0 color-bg-transparent" data-action="click:cookie-consent-link#showConsentManagement">
- Manage cookies
- </button>
- </cookie-consent-link>
-</li>
-
-<li class="mx-2">
- <cookie-consent-link>
- <button type="button" class="Link--secondary underline-on-hover border-0 p-0 color-bg-transparent" data-action="click:cookie-consent-link#showConsentManagement">
- Do not share my personal information
- </button>
- </cookie-consent-link>
-</li>
-
- </ul>
- </nav>
- </div>
-</footer>
-
-
-
-
- <cookie-consent id="cookie-consent-banner" class="position-fixed bottom-0 left-0" style="z-index: 999999" data-initial-cookie-consent-allowed="" data-cookie-consent-required="true"></cookie-consent>
-
-
- <div id="ajax-error-message" class="ajax-error-message flash flash-error" hidden>
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-alert">
- <path d="M6.457 1.047c.659-1.234 2.427-1.234 3.086 0l6.082 11.378A1.75 1.75 0 0 1 14.082 15H1.918a1.75 1.75 0 0 1-1.543-2.575Zm1.763.707a.25.25 0 0 0-.44 0L1.698 13.132a.25.25 0 0 0 .22.368h12.164a.25.25 0 0 0 .22-.368Zm.53 3.996v2.5a.75.75 0 0 1-1.5 0v-2.5a.75.75 0 0 1 1.5 0ZM9 11a1 1 0 1 1-2 0 1 1 0 0 1 2 0Z"></path>
-</svg>
- <button type="button" class="flash-close js-ajax-error-dismiss" aria-label="Dismiss error">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x">
- <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
- </button>
- You can’t perform that action at this time.
- </div>
-
- <template id="site-details-dialog">
- <details class="details-reset details-overlay details-overlay-dark lh-default color-fg-default hx_rsm" open>
- <summary role="button" aria-label="Close dialog"></summary>
- <details-dialog class="Box Box--overlay d-flex flex-column anim-fade-in fast hx_rsm-dialog hx_rsm-modal">
- <button class="Box-btn-octicon m-0 btn-octicon position-absolute right-0 top-0" type="button" aria-label="Close dialog" data-close-dialog>
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-x">
- <path d="M3.72 3.72a.75.75 0 0 1 1.06 0L8 6.94l3.22-3.22a.749.749 0 0 1 1.275.326.749.749 0 0 1-.215.734L9.06 8l3.22 3.22a.749.749 0 0 1-.326 1.275.749.749 0 0 1-.734-.215L8 9.06l-3.22 3.22a.751.751 0 0 1-1.042-.018.751.751 0 0 1-.018-1.042L6.94 8 3.72 4.78a.75.75 0 0 1 0-1.06Z"></path>
-</svg>
- </button>
- <div class="octocat-spinner my-6 js-details-dialog-spinner"></div>
- </details-dialog>
- </details>
-</template>
-
- <div class="Popover js-hovercard-content position-absolute" style="display: none; outline: none;" tabindex="0">
- <div class="Popover-message Popover-message--bottom-left Popover-message--large Box color-shadow-large" style="width:360px;">
- </div>
-</div>
-
- <template id="snippet-clipboard-copy-button">
- <div class="zeroclipboard-container position-absolute right-0 top-0">
- <clipboard-copy aria-label="Copy" class="ClipboardButton btn js-clipboard-copy m-2 p-0 tooltipped-no-delay" data-copy-feedback="Copied!" data-tooltip-direction="w">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon m-2">
- <path d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 0 1 0 1.5h-1.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-1.5a.75.75 0 0 1 1.5 0v1.5A1.75 1.75 0 0 1 9.25 16h-7.5A1.75 1.75 0 0 1 0 14.25Z"></path><path d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0 1 14.25 11h-7.5A1.75 1.75 0 0 1 5 9.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path>
-</svg>
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-fg-success d-none m-2">
- <path d="M13.78 4.22a.75.75 0 0 1 0 1.06l-7.25 7.25a.75.75 0 0 1-1.06 0L2.22 9.28a.751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018L6 10.94l6.72-6.72a.75.75 0 0 1 1.06 0Z"></path>
-</svg>
- </clipboard-copy>
- </div>
-</template>
-<template id="snippet-clipboard-copy-button-unpositioned">
- <div class="zeroclipboard-container">
- <clipboard-copy aria-label="Copy" class="ClipboardButton btn btn-invisible js-clipboard-copy m-2 p-0 tooltipped-no-delay d-flex flex-justify-center flex-items-center" data-copy-feedback="Copied!" data-tooltip-direction="w">
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-copy js-clipboard-copy-icon">
- <path d="M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 0 1 0 1.5h-1.5a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-1.5a.75.75 0 0 1 1.5 0v1.5A1.75 1.75 0 0 1 9.25 16h-7.5A1.75 1.75 0 0 1 0 14.25Z"></path><path d="M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0 1 14.25 11h-7.5A1.75 1.75 0 0 1 5 9.25Zm1.75-.25a.25.25 0 0 0-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 0 0 .25-.25v-7.5a.25.25 0 0 0-.25-.25Z"></path>
-</svg>
- <svg aria-hidden="true" height="16" viewBox="0 0 16 16" version="1.1" width="16" data-view-component="true" class="octicon octicon-check js-clipboard-check-icon color-fg-success d-none">
- <path d="M13.78 4.22a.75.75 0 0 1 0 1.06l-7.25 7.25a.75.75 0 0 1-1.06 0L2.22 9.28a.751.751 0 0 1 .018-1.042.751.751 0 0 1 1.042-.018L6 10.94l6.72-6.72a.75.75 0 0 1 1.06 0Z"></path>
-</svg>
- </clipboard-copy>
- </div>
-</template>
-
-
-
-
- </div>
-
- <div id="js-global-screen-reader-notice" class="sr-only" aria-live="polite" aria-atomic="true" ></div>
- <div id="js-global-screen-reader-notice-assertive" class="sr-only" aria-live="assertive" aria-atomic="true"></div>
- </body>
-</html>
-
diff --git a/transformers.spec b/transformers.spec
index 9dd3f8d..5bb5991 100644
--- a/transformers.spec
+++ b/transformers.spec
@@ -3,8 +3,8 @@ Version: 4.39.0
Release: 1%{?dist}
Summary: Transformers provides thousands of pretrained models to perform tasks on different modalities.
License: Apache-2.0
-URL: https://huggingface.co/docs/transformers/index
-Source0: https://github.com/huggingface/transformers.git
+URL: https://huggingface.co/docs/transformers/index
+Source0: https://github.com/huggingface/transformers/archive/refs/tags/v%{version}.tar.gz
Requires: python3-filelock
Requires: python3-packaging
@@ -27,7 +27,6 @@ BuildRequires: python3-devel
BuildRequires: python3-setuptools
BuildRequires: python3-pip
BuildRequires: python3-wheel
-BuildRequires: python3-hatchling
 
%description -n python3-transformers
python3-transformers provides APIs for transformers realize.
@@ -42,6 +41,7 @@ description for the doc of the python-transformers
 
%prep
%autosetup -p1 -n transformers-%{version}
+pip install hatchling
 
%build
%pyproject_build
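
As a side note on the Source0 change above: a minimal sketch (not part of the commit; assumes curl and md5sum are available) of fetching the tarball that the new Source0 URL expands to once Version 4.39.0 is substituted, so its checksum can be compared against the md5 recorded for the imported source:

  # Expanded form of the new Source0 (v%{version}.tar.gz with Version: 4.39.0)
  curl -LO https://github.com/huggingface/transformers/archive/refs/tags/v4.39.0.tar.gz
  # openEuler dist-git style imports record an md5 per source file; compare against that entry
  md5sum v4.39.0.tar.gz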