
"""Access datasets."""

import glob
import importlib
import inspect
import json
import os
import posixpath
from collections import Counter
from collections.abc import Mapping, Sequence
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Optional, Union

import fsspec
import requests
import yaml
from fsspec.core import url_to_fs
from huggingface_hub import DatasetCard, DatasetCardData, HfApi
from huggingface_hub.utils import (
    EntryNotFoundError,
    GatedRepoError,
    LocalEntryNotFoundError,
    OfflineModeIsEnabled,
    RepositoryNotFoundError,
    RevisionNotFoundError,
    get_session,
)

from . import __version__, config
from .arrow_dataset import Dataset
from .builder import BuilderConfig, DatasetBuilder
from .data_files import (
    DataFilesDict,
    DataFilesList,
    DataFilesPatternsDict,
    EmptyDatasetError,
    get_data_patterns,
    sanitize_patterns,
)
from .dataset_dict import DatasetDict, IterableDatasetDict
from .download.download_config import DownloadConfig
from .download.download_manager import DownloadMode
from .download.streaming_download_manager import StreamingDownloadManager, xbasename, xglob, xjoin
from .exceptions import DataFilesNotFoundError, DatasetNotFoundError
from .features import Features
from .features.features import _fix_for_backward_compatible_features
from .fingerprint import Hasher
from .info import DatasetInfo, DatasetInfosDict
from .iterable_dataset import IterableDataset
from .naming import camelcase_to_snakecase, snakecase_to_camelcase
from .packaged_modules import (
    _EXTENSION_TO_MODULE,
    _MODULE_TO_EXTENSIONS,
    _MODULE_TO_METADATA_FILE_NAMES,
    _PACKAGED_DATASETS_MODULES,
)
from .packaged_modules.folder_based_builder.folder_based_builder import FolderBasedBuilder
from .splits import Split
from .utils import _dataset_viewer
from .utils.file_utils import (
    _raise_if_offline_mode_is_enabled,
    cached_path,
    get_datasets_user_agent,
    is_relative_path,
    relative_to_absolute_path,
)
from .utils.hub import hf_dataset_url
from .utils.info_utils import VerificationMode, is_small_dataset
from .utils.logging import get_logger
from .utils.metadata import MetadataConfigs
from .utils.typing import PathLike
from .utils.version import Version


logger = get_logger(__name__)

ALL_ALLOWED_EXTENSIONS = list(_EXTENSION_TO_MODULE.keys()) + [".zip"]


class _InitializeConfiguredDatasetBuilder:
    """
    From https://stackoverflow.com/questions/4647566/pickle-a-dynamically-parameterized-sub-class
    See also ConfiguredDatasetBuilder.__reduce__
    When called with the param value as the only argument, returns an
    un-initialized instance of the parameterized class. Subsequent __setstate__
    will be called by pickle.
    """

    def __call__(self, builder_cls, metadata_configs, default_config_name, name):
        obj = _InitializeConfiguredDatasetBuilder()
        obj.__class__ = configure_builder_class(
            builder_cls, metadata_configs, default_config_name=default_config_name, dataset_name=name
        )
        return obj


def configure_builder_class(
    builder_cls: type[DatasetBuilder],
    builder_configs: list[BuilderConfig],
    default_config_name: Optional[str],
    dataset_name: str,
) -> type[DatasetBuilder]:
    """
    Dynamically create a builder class with custom builder configs parsed from README.md file,
    i.e. set BUILDER_CONFIGS class variable of a builder class to custom configs list.
    """

    class ConfiguredDatasetBuilder(builder_cls):
        BUILDER_CONFIGS = builder_configs
        DEFAULT_CONFIG_NAME = default_config_name
        __module__ = builder_cls.__module__

        def __reduce__(self):
            parent_builder_cls = self.__class__.__mro__[1]
            return (
                _InitializeConfiguredDatasetBuilder(),
                (
                    parent_builder_cls,
                    self.BUILDER_CONFIGS,
                    self.DEFAULT_CONFIG_NAME,
                    self.dataset_name,
                ),
                self.__dict__.copy(),
            )

    ConfiguredDatasetBuilder.__name__ = (
        f"{builder_cls.__name__.lower().capitalize()}{snakecase_to_camelcase(dataset_name)}"
    )
    ConfiguredDatasetBuilder.__qualname__ = (
        f"{builder_cls.__name__.lower().capitalize()}{snakecase_to_camelcase(dataset_name)}"
    )

    return ConfiguredDatasetBuilder


def import_main_class(module_path) -> Optional[type[DatasetBuilder]]:
    """Import a module at module_path and return its main class: a DatasetBuilder"""
    module = importlib.import_module(module_path)
    module_main_cls = None
    for name, obj in module.__dict__.items():
        if inspect.isclass(obj) and issubclass(obj, DatasetBuilder):
            if inspect.isabstract(obj):
                continue
            module_main_cls = obj
            obj_module = inspect.getmodule(obj)
            if obj_module is not None and module == obj_module:
                break

    return module_main_cls


def get_dataset_builder_class(
    dataset_module: "DatasetModule", dataset_name: Optional[str] = None
) -> type[DatasetBuilder]:
    builder_cls = import_main_class(dataset_module.module_path)
    if dataset_module.builder_configs_parameters.builder_configs:
        dataset_name = dataset_name or dataset_module.builder_kwargs.get("dataset_name")
        if dataset_name is None:
            raise ValueError("dataset_name should be specified but got None")
        builder_cls = configure_builder_class(
            builder_cls,
            builder_configs=dataset_module.builder_configs_parameters.builder_configs,
            default_config_name=dataset_module.builder_configs_parameters.default_config_name,
            dataset_name=dataset_name,
        )
    return builder_cls


def increase_load_count(name: str):
    """Update the download count of a dataset."""
    if not config.HF_HUB_OFFLINE and config.HF_UPDATE_DOWNLOAD_COUNTS:
        try:
            get_session().head(
                "/".join((config.S3_DATASETS_BUCKET_PREFIX, name, name + ".py")),
                headers={"User-Agent": get_datasets_user_agent()},
                timeout=3,
            )
        except Exception:
            pass


def infer_module_for_data_files_list(
    data_files_list: DataFilesList, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict]:
    """Infer module (and builder kwargs) from list of data files.

    It picks the module based on the most common file extension.
    In case of a draw ".parquet" is the favorite, and then alphabetical order.

    Args:
        data_files_list (DataFilesList): List of data files.
        download_config ([`DownloadConfig`], *optional*): Specific download configuration parameters, mainly `token` or `storage_options`, to support different platforms and auth types.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with
            - inferred module name
            - dict of builder kwargs
    """
    extensions_counter = Counter(
        ("." + suffix.lower(), xbasename(filepath) in FolderBasedBuilder.METADATA_FILENAMES)
        for filepath in data_files_list[: config.DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE]
        for suffix in xbasename(filepath).split(".")[1:]
    )
    if extensions_counter:

        def sort_key(ext_count: tuple[tuple[str, bool], int]) -> tuple[int, bool]:
            """Sort by count and set ".parquet" as the favorite in case of a draw, and ignore metadata files"""
            (ext, is_metadata), count = ext_count
            return (not is_metadata, count, ext == ".parquet", ext == ".jsonl", ext == ".json", ext == ".csv", ext)

        for (ext, _), _ in sorted(extensions_counter.items(), key=sort_key, reverse=True):
            if ext in _EXTENSION_TO_MODULE:
                return _EXTENSION_TO_MODULE[ext]
            elif ext == ".zip":
                return infer_module_for_data_files_list_in_archives(data_files_list, download_config=download_config)
    return None, {}
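
# Illustrative sketch, not part of the upstream module: how the inference above is expected to
# behave for a hypothetical list of parquet shards. A plain Python list is enough here because
# the function only slices and iterates its input; inside the library it receives a
# `DataFilesList`. The expected result assumes `_EXTENSION_TO_MODULE` maps ".parquet" to the
# packaged "parquet" builder with no extra kwargs.
#
#     >>> infer_module_for_data_files_list(
#     ...     ["data/train-00000-of-00002.parquet", "data/train-00001-of-00002.parquet"]
#     ... )
#     ('parquet', {})
#
# ".parquet" also wins ties against other extensions, so a folder with a few stray JSON sidecar
# files still resolves to the parquet builder unless another extension is strictly more common.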


def infer_module_for_data_files_list_in_archives(
    data_files_list: DataFilesList, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict]:
    """Infer module (and builder kwargs) from list of archive data files.

    Args:
        data_files_list (DataFilesList): List of data files.
        download_config ([`DownloadConfig`], *optional*): Specific download configuration parameters,
            mainly `token` or `storage_options`, to support different platforms and auth types.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with
            - inferred module name
            - dict of builder kwargs
    """
    archived_files = []
    archive_files_counter = 0
    for filepath in data_files_list:
        if str(filepath).endswith(".zip"):
            archive_files_counter += 1
            if archive_files_counter > config.GLOBBED_DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE:
                break
            extracted = xjoin(StreamingDownloadManager().extract(filepath), "**")
            archived_files += [
                f.split("::")[0]
                for f in xglob(extracted, recursive=True, download_config=download_config)[
                    : config.ARCHIVED_DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE
                ]
            ]

    extensions_counter = Counter(
        "." + suffix.lower() for filepath in archived_files for suffix in xbasename(filepath).split(".")[1:]
    )
    if extensions_counter:
        most_common = extensions_counter.most_common(1)[0][0]
        if most_common in _EXTENSION_TO_MODULE:
            return _EXTENSION_TO_MODULE[most_common]
    return None, {}


def infer_module_for_data_files(
    data_files: DataFilesDict, path: Optional[str] = None, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict[str, Any]]:
    """Infer module (and builder kwargs) from data files. Raise if module names for different splits don't match.

    Args:
        data_files ([`DataFilesDict`]): Dict of list of data files.
        path (str, *optional*): Dataset name or path.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters to authenticate on the Hugging Face Hub for private remote files.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with
            - inferred module name
            - builder kwargs
    """
    split_modules = {
        split: infer_module_for_data_files_list(data_files_list, download_config=download_config)
        for split, data_files_list in data_files.items()
    }
    module_name, default_builder_kwargs = next(iter(split_modules.values()))
    if any((module_name, default_builder_kwargs) != split_module for split_module in split_modules.values()):
        raise ValueError(f"Couldn't infer the same data file format for all splits. Got {split_modules}")
    if not module_name:
        raise DataFilesNotFoundError("No (supported) data files found" + (f" in {path}" if path else ""))
    return module_name, default_builder_kwargs


def create_builder_configs_from_metadata_configs(
    module_path: str,
    metadata_configs: MetadataConfigs,
    base_path: Optional[str] = None,
    default_builder_kwargs: dict[str, Any] = None,
    download_config: Optional[DownloadConfig] = None,
) -> tuple[list[BuilderConfig], str]:
    builder_cls = import_main_class(module_path)
    builder_config_cls = builder_cls.BUILDER_CONFIG_CLASS
    default_config_name = metadata_configs.get_default_config_name()
    builder_configs = []
    default_builder_kwargs = {} if default_builder_kwargs is None else default_builder_kwargs

    base_path = base_path if base_path is not None else ""
    for config_name, config_params in metadata_configs.items():
        config_data_files = config_params.get("data_files")
        config_data_dir = config_params.get("data_dir")
        config_base_path = xjoin(base_path, config_data_dir) if config_data_dir else base_path
        try:
            config_patterns = (
                sanitize_patterns(config_data_files)
                if config_data_files is not None
                else get_data_patterns(config_base_path, download_config=download_config)
            )
            config_data_files_dict = DataFilesPatternsDict.from_patterns(
                config_patterns,
                allowed_extensions=ALL_ALLOWED_EXTENSIONS,
            )
        except EmptyDatasetError as e:
            raise EmptyDatasetError(
                f"Dataset at '{base_path}' doesn't contain data files matching the patterns for config '{config_name}',"
                f" check `data_files` and `data_dir` parameters in the `configs` YAML field in README.md. "
            ) from e
        ignored_params = [
            param for param in config_params if not hasattr(builder_config_cls, param) and param != "default"
        ]
        if ignored_params:
            logger.warning(
                f"Some datasets params were ignored: {ignored_params}. "
                "Make sure to use only valid params for the dataset builder and to have an up-to-date version of the `datasets` library."
            )
        builder_configs.append(
            builder_config_cls(
                name=config_name,
                data_files=config_data_files_dict,
                data_dir=config_data_dir,
                **{
                    param: value
                    for param, value in {**default_builder_kwargs, **config_params}.items()
                    if hasattr(builder_config_cls, param) and param not in ("default", "data_files", "data_dir")
                },
            )
        )
    return builder_configs, default_config_name


@dataclass
class BuilderConfigsParameters:
    """Dataclass containing objects related to creation of builder configurations from yaml's metadata content.

    Attributes:
        metadata_configs (`MetadataConfigs`, *optional*):
            Configs parsed from yaml's metadata.
        builder_configs (`list[BuilderConfig]`, *optional*):
            List of BuilderConfig objects created from metadata_configs above.
        default_config_name (`str`):
            Name of default config taken from yaml's metadata.
    NrR   r]   rL   )rX   rY   rZ   r[   rR   r
   rE   __annotations__r]   listr   rL   r   r\   rW   rU   r   r   i  s<    	 37h/659OXd=129)-#-rW   r   c                   \    e Zd ZU eed<   eed<   eed<    ee      Zeed<   dZ	e
e   ed<   y)r|   rv   hashr   )default_factoryr~   Ndataset_infos)rX   rY   rZ   r   r   dictr   r   r~   r   r
   r1   r\   rW   rU   r|   r|   {  s5    
I;@Qi;j 8j04M8,-4rW   c                       e Zd ZdefdZy)_DatasetModuleFactoryr^   c                     t         r   )NotImplementedError)rP   s    rU   
get_modulez _DatasetModuleFactory.get_module  s    !!rW   N)rX   rY   rZ   r|   r   r\   rW   rU   r   r     s    "M "rW   r   c                   d    e Zd ZdZ	 	 	 d
dedee   deeeeef      deee	ef      fdZ
defd	Zy)LocalDatasetModuleFactoryzGet the module of a dataset loaded from the user's data files. The dataset builder module to use is inferred
    from the data files extensions.Nr   r   r   download_modec                     |r-t         j                  j                  |      rt        d|       t	        |      j                         | _        t	        |      j                  | _        || _        || _	        || _
        y )Nz;`data_dir` must be relative to a dataset directory's root: )osr   isabsr   r   as_posixstemrS   r   r   r   )rP   r   r   r   r   s        rU   __init__z"LocalDatasetModuleFactory.__init__  sd     h/Z[_Z`abbJ'')	JOO	$ *rW   r^   c                 	   t         j                  j                  | j                  t        j                        }t         j                  j                  | j                  t        j
                        }t         j                  j                  |      rt        j                  |      j                  n	t               }t         j                  j                  |      rgt        |d      5 }t        j                  |j                               }|r,|j!                         }|j#                  |       t        di |}d d d        t%        j&                  |      }t)        j&                  |      }t+        | j                  | j,                  xs d      j/                         j1                         j3                         }	| j4                  t7        | j4                        }
nl|r_| j,                  sSdt9        t;        |j=                                     v r/t7        t9        t;        |j=                                     d         }
nt?        |	      }
tA        jB                  |
|	tD              }tG        || j                        \  }}|jI                  tJ        |   tL        |         }tN        |   \  }}|rtQ        |||	|      \  }}n  tS        |      jT                  dd|i|g}d }| j                  tW        t+        | j                        jX                        d	}| j,                  r||d<   t         j                  j                  t         j                  j                  | j                  t        jZ                              rt        t         j                  j                  | j                  t        jZ                        d      5 }t)        t]        j                  |      j_                         D ci c]  \  }}|ta        jb                  |       c}}      }te        |      d
k(  r(t9        t;        |            }|jg                  |      |d<   d d d        j#                  |       |}|"te        |      d
k(  rt9        t;        |            }ti        jj                  ||d      }tm        ||||to        |||            S # 1 sw Y   xY wc c}}w # 1 sw Y   xY w)Nutf-8encodingr   r   )r   r   )r   r   
extensions
file_names)r   r   )r   rM   r   r   )r   r]   rR   r]   rL   r   r~   r\   )8r   r   r   r   REPOCARD_FILENAMEREPOYAML_FILENAMEisfiler   loaddatar   existsopenyaml	safe_loadreadto_dictupdaterE   from_dataset_card_datar1   r   r   
expanduserresolver   r   r"   r   r   r   r!   r   r   r   r   filterr6   r7   r8   r   rz   r   r3   rS   DATASETDICT_INFOS_FILENAMEjsonrp   r0   	from_dictlenpopr/   r   r|   r   )rP   readme_pathstandalone_yaml_pathdataset_card_datar   standalone_yaml_data_dataset_card_data_dictrR   r   r   patternsr   r   r   rv   r   r]   rL   r   r   dataset_info_dictlegacy_dataset_infoslegacy_config_namer   s                           rU   r   z$LocalDatasetModuleFactory.get_module  s   ggll499f.F.FG!ww||DIIv7O7OPBD''..Q\B]K,,[9>>crct77>>./*W= S'+~~affh'?$'.?.G.G.I++223GH(7(R:Q(R%S +AABST(??@QR DMM$7R8CCEMMOXXZ	??&(9HdmmTRbRiRiRkMlHm8m(d3C3J3J3L.M)N|)\]H(3H"005


 /J!/
++  &&,[9FdepFq ' 

 4K@Q3_ #'=	40O0 D!+.CC ),4O #'24		?3G3GH
 ==+5N<(77>>"'',,tyy&2S2STUbggll499f.O.OP[bc 
cgh'7 ?Ciil>P>P>R:K): $[%:%:;L%MM($ +,1)-d3G.H)I&6J6N6NOa6b(3
c !''60M&3}+=+B"&tM':";{{]_`''?!1 /$7(

 
	
MS Sn
c 
cs+   %AR8;,S' S>S8SSSNNN)rX   rY   rZ   r[   r   r
   r   r   r   r&   r   r|   r   r\   rW   rU   r   r     sn    ' #'7;<@++ 3-+ U3d?34	+
  lC&7 89+ U
M U
rW   r   c                   p    e Zd ZdZ	 	 	 	 ddedee   deeeeef      dee	   deee
ef      f
dZd	efd
Zy)PackagedDatasetModuleFactoryz`Get the dataset builder module from the ones that are packaged with the library: csv, json, etc.NrS   r   r   r   r   c                 `    || _         || _        || _        || _        || _        t        |       y r   )rS   r   r   r   r   r   )rP   rS   r   r   r   r   s         rU   r   z%PackagedDatasetModuleFactory.__init__  s1     	$ .*D!rW   r^   c                    t        | j                  xs d      j                         j                         j	                         }| j
                  t        | j
                        nt        || j                        }t        j                  || j                  |      }t        | j                     \  }}|| j                  d}t        |||      S )Nr   r   )r   r   )r   rM   )r   r   r  r  r   r   r"   r!   r   r   r   r8   rS   r|   )rP   r   r  r   rv   r   r   s          rU   r   z'PackagedDatasetModuleFactory.get_module  s    ,"-88:BBDMMO	 * doo."9d>R>RS 	
 #00 00

 7tyyAT % II

 [$??rW   )NNNN)rX   rY   rZ   r[   r   r
   r   r   r   r%   r&   r   r|   r   r\   rW   rU   r%  r%    s~    j
 #'7;48<@"" 3-" U3d?34	"
 ".1"  lC&7 89"@M @rW   r%  c                   z    e Zd ZdZ	 	 	 	 	 ddededee   deeeeef      dee	   deee
ef      d	efd
ZdefdZy)HubDatasetModuleFactoryz
    Get the module of a dataset loaded from data files of a dataset repository.
    The dataset builder module to use is inferred from the data files extensions.
    NrS   commit_hashr   r   r   r   use_exported_dataset_infosc                     || _         || _        || _        || _        |xs
 t	               | _        || _        || _        t        |       y r   )	rS   r*  r   r   r%   r   r   r+  r   )rP   rS   r*  r   r   r   r   r+  s           rU   r   z HubDatasetModuleFactory.__init__$  sI     	&$ .B.2B**D'D!rW   r^   c                    t        t        j                  | j                  j                  dt
        t        | j                  j                              }	 |j                  | j                  t        j                  d| j                  | j                  j                        }t        j                  |      j                  }| j                  j%                         }|j&                  d|_        	 t)        t+        | j                  t        j,                  | j                        |      }t/        |d	      5 }t1        j2                  |j5                               }|r,|j7                         }|j9                  |       t#        di |}d d d        d
| j                   d| j                   d| j<                  xs d j?                  d      }	tA        jB                  |      }
tE        jB                  |      }t        jF                  r~| jH                  rr	 tK        jL                  | j                  | j                  | j                  j                        }tE        |D ci c]  }|tO        jP                  ||          c}      }nd }|r|j9                  |       |}| jT                  tW        | jT                        }nx|
r_| j<                  sSdtY        t[        |
j]                                     v r/tW        tY        t[        |
j]                                     d         }nt_        |	| j                        }ta        jb                  ||	td        | j                        }tg        || j                  | j                        \  }}|ji                  tj        |   tl        |         }tn        |   \  }}|
rtq        ||
|	|| j                        \  }}n  ts        |      jt                  dd|i|g}d }t+        | j                  d| j                        j?                  d      | j                  tw        ty        | j                        j                        d}| j<                  r||d<   | j                  j%                         }|j&                  d|_        	 t)        t+        | j                  t        jz                  | j                        |      }t/        |d	      5 }tE        t}        j                  |      j                         D ci c]  \  }}|tO        jP                  |       c}}      }t        |      dk(  r(tY        t[        |            }|j                  |      |d<   d d d        j9                  |       |}|"t        |      dk(  rtY        t[        |            }t        || j                  ||t        |
||            S # t         $ r t#               }Y w xY w# 1 sw Y   ]xY w# t:        $ r Y lw xY wc c}w # tJ        jR                  $ r d }Y w xY wc c}}w # 1 sw Y   xY w# t:        $ r Y w xY w)Ndatasetsendpointtokenlibrary_namelibrary_version
user_agentdatasetrepo_idfilename	repo_typerevisionproxieszDownloading standalone yaml)r:  r   r   r   zhf://datasets/@r   r   r5  r*  r1  r   )r   r   r   )r   r   r   r   )r   r   r   )r   r7  rM   zDownloading metadatar   r   r  r  r\   )Dr   r   HF_ENDPOINTr   r1  r   r>   r4  hf_hub_downloadrS   r  r*  r;  r   r  r	  r   r   rg   download_descr=   rA   r  r  r  r  r  r  r  FileNotFoundErrorr   rstriprE   r  r1   USE_PARQUET_EXPORTr+  r;   get_exported_dataset_infosr0   r  DatasetViewerErrorr   r"   r   r   r   r!   r   r   r   r   r  r6   r7   r8   r   rz   r   r3   r   r  r  rp   r  r  r|   r   )rP   apidataset_readme_pathr  r   r  r   r  r  r   rR   r   exported_dataset_infosr   r  r   r   r   rv   r   r]   rL   r   dataset_infos_pathr   r!  r"  s                              rU   r   z"HubDatasetModuleFactory.get_module7  s   ''&&,,#'.t/C/C/N/NO

	2"%"5"5		11#)),,44 #6 # !, 0 01D E J J ..335((0,IO)	#.tyy&*B*BTM]M]^ /$  *W= S'+~~affh'?$'.?.G.G.I++223GH(7(R:Q(R%S %TYYKq1A1A0B!DMMDWUWCXY``ade	*AABST(??@QR$$)H)H.)8)S)S II43C3C4K_K_KeKe*& *: ,B' $[%:%:;QR];^%__*& &*"!"))-82M ??&(9HdmmTRbRiRiRkMlHm8m(d3C3J3J3L.M)N|)\]H(DDXDXYH"005 00	

 /J! 00/
++
  &&,[9FdepFq ' 

 4K@Q3_ #'= $ 4 440O0 D!+.CC ),4O #''		2@P@PQXXY\]yy24		?3G3GH

 ==+5N<(..335((0,BO)	!,tyy&*K*KVZVfVfg /" (7; 
cq'7 ?Ciil>P>P>R:K): $[%:%:;L%MM($ +,1)-d3G.H)I&6J6N6NOa6b(3
c !''60M &3}+=+B"&tM':";''?!1 /$7(

 
	
a " 	2 / 1	2S S ! 		
 #55 .)-&.@
c 
c ! 		s   A+W	 .AW0 6AW#W0 A	X  X <X AX3 ,X' X!&>X'$X3 	W W #W-(W0 0	W=<W= X XX!X''X0,X3 3	X?>X?)NNNNF)rX   rY   rZ   r[   r   r
   r   r   r   r%   r&   r   r   r|   r   r\   rW   rU   r)  r)    s     #'7;48<@+0"" " 3-	"
 U3d?34" ".1"  lC&7 89" %)"&L
M L
rW   r)  c                   :    e Zd ZdZ	 d	dededee   fdZdefdZ	y)
(HubDatasetModuleFactoryWithParquetExportzg
    Get the module of a dataset loaded from parquet files of a dataset repository parquet export.
    NrS   r*  r   c                 \    || _         || _        |xs
 t               | _        t	        |       y r   )rS   r*  r%   r   r   )rP   rS   r*  r   s       rU   r   z1HubDatasetModuleFactoryWithParquetExport.__init__  s+     	&.B.2BD!rW   r^   c                    t        j                  | j                  | j                  | j                  j
                        }t        j                  | j                  | j                  | j                  j
                        }t        |D ci c]  }|t        j                  ||          c}      }t        t        j                  | j                  j
                  dt        t        | j                  j                              j!                  | j                  d| j                  j
                  d      j"                  }t%        j&                  |||      }t(        d   \  }}t+        ||| j                  	      \  }	}
| j                  t-        t/        | j                        j                        d
}t1        || j                  ||t3        ||	|
            S c c}w )Nr=  r.  r/  zrefs/convert/parquet      Y@)r:  r1  r   )parquet_commit_hashexported_parquet_filesr   parquetr   r7  rM   r  r  )r;   get_exported_parquet_filesrS   r*  r   r1  rD  r1   r0   r  r   r   r>  r   r>   r4  dataset_infosharE   ._from_exported_parquet_files_and_dataset_infosr8   r   r3   r   r|   r   )rP   rP  rH  r   r   rO  rR   rv   r   r]   rL   r   s               rU   r   z3HubDatasetModuleFactoryWithParquetExport.get_module  s   !0!K!KII4+;+;4CWCWC]C]"
 "1!K!KII4+;+;4CWCWC]C]"
 ) $: [223I+3VWW
 ++**00' +243G3G3R3RS \		/**00	   S 	  +YY 3#9'

 4I>Q/[ 000
,, yy24		?3G3GH

 ''?!1 /$7(

 
	
Ks   
 Gr   )
rX   rY   rZ   r[   r   r
   r%   r   r|   r   r\   rW   rU   rK  rK    s>     59		"	" 	" ".1		"7
M 7
rW   rK  c                   6    e Zd ZdZ	 ddedee   fdZdefdZy)	CachedDatasetModuleFactoryzS
    Get the module of a dataset that has been loaded once already and cached.
    NrS   	cache_dirc                 `    || _         || _        | j                   j                  d      dk  sJ y )Nr   r   )rS   rY  r   )rP   rS   rY  s      rU   r   z#CachedDatasetModuleFactory.__init__  s-    
 	"yys#q(((rW   r^   c                 N   t         j                  j                  t        | j                  xs t
        j                              }| j                  j                  d      }t        |d         |d<   dj                  |      }t         j                  j                  ||      }t        j                  t         j                  j                  |ddd            D cg c]#  }t         j                  j                  |      r|% }}|rx| j                  | j                  j                  d      d   d}d| j                   d}t
        j                  r|dz  }t        j                  |       t!        d	d
i |dd
i      S t#        d| j                   d| j                         c c}w )Nr   ___*rR  z5Using the latest cached version of the dataset since z* couldn't be found on the Hugging Face Hubz (offline mode is enabled).z%datasets.packaged_modules.cache.cacheautoversionzDataset z is not cached in )r   r   r  r   rY  r   HF_DATASETS_CACHErS   r   r3   r   globisdirr   r   r   r|   rA  )	rP   rY  namespace_and_dataset_namecached_relative_path#cached_datasets_directory_path_rootcached_directory_pathcached_directory_pathsr   warning_msgs	            rU   r   z%CachedDatasetModuleFactory.get_module  s   GG&&s4>>+UV=U=U'VW	%)YY__S%9")?@Z[]@^)_"2&$zz*DE.0ggll9FZ.[+ *.277<<@cehjmor3s)t"
%ww}}23 ""
 "

 "99 $		 4R 8N RRVR[R[Q\  ]G  HK$$<<NN;' 75>59f5 
  (499+5GGW XYY'"
s   (F"r   )	rX   rY   rZ   r[   r   r
   r   r|   r   r\   rW   rU   rX  rX    s5     $()) C=)ZM ZrW   rX  r:  r   r   rY  c                    |t        d*i |}t        |xs t        j                        }d|_        d|_        |t        j
                  k(  |_        t        t        d | j                  t        j                  d      j                  d                  d   }|j                  d      s|dz   }t        j                  j                  | |      }	| t         v rt#        | ||||      j%                         S | j                  |      rt'        d|       t        j                  j)                  |	      rt'        d|       t        j                  j+                  |       rt-        | |||	      j%                         S t/        |       r| j1                  d      d
k  r	 t3        t4        j6                  |j8                  dt:        t=        |j>                              }
	 tA                |
jC                  | t4        jD                  d||jF                        }t        j                  jI                  t        j                  jK                  |            }	 |
jC                  | |d||jF                         t'        d|       t}        d%t        |        d)      # tL        $ rz}tO        |jP                  tR        tT        jV                  jX                  tT        jV                  jZ                  f      r't[        d|  d|j\                  j^                   d      | d}~wt`        $ r! |
jc                  | |d      jd                  }Y tR        tT        jV                  jX                  tT        jV                  jZ                  f$ r,}t[        d|  d|j\                  j^                   d      |d}~wtf        $ rX}d|  d}|jh                  jj                  dk(  r|dz  }n"|jh                  jj                  dk(  r	|d|  dz  }tm        |      |d}~wtn        $ r}tm        d| d|  d      |d}~wtp        $ r}tm        d|  d      |d}~ww xY w# t`        $ r3 |s	|s|r|dk7  rd }nd}ts        | ||||||!      j%                         cY S tf        $ rX}d|  d}|jh                  jj                  dk(  r|dz  }n"|jh                  jj                  dk(  r	|d|  dz  }tm        |      |d}~wtn        $ r}tm        d| d|  d      |d}~ww xY w# tt        $ r}	 tw        | |"      j%                         cY d}~S # tt        $ r tO        |tR              rt[        d#|  d$|       dtO        |tx        tl        tz        f      r|dtO        |t|              r4t}        d%t        |        d&|  d't        |      j^                   d(|       d|dw xY wd}~ww xY w)+aX	  
    Download/extract/cache a dataset module.

    Dataset codes are cached inside the dynamic modules cache to allow easy import (avoid ugly sys.path tweaks).

    Args:

        path (str): Path or name of the dataset.
            Depending on ``path``, the dataset builder that is used comes from one of the generic dataset builders (JSON, CSV, Parquet, text etc.).

            For local datasets:

            - if ``path`` is a local directory (containing data files only)
              -> load a generic dataset builder (csv, json, text etc.) based on the content of the directory
              e.g. ``'./path/to/directory/with/my/csv/data'``.

            For datasets on the Hugging Face Hub (list all available datasets with ``huggingface_hub.list_datasets()``)

            - if ``path`` is a dataset repository on the HF hub (containing data files only)
              -> load a generic dataset builder (csv, text etc.) based on the content of the repository
              e.g. ``'username/dataset_name'``, a dataset repository on the HF hub containing your data files.

        revision (:class:`~utils.Version` or :obj:`str`, optional): Version of the dataset to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        download_config (:class:`DownloadConfig`, optional): Specific download configuration parameters.
        download_mode (:class:`DownloadMode` or :obj:`str`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
        data_dir (:obj:`str`, optional): Directory with the data files. Used only if `data_files` is not specified,
            in which case it's equal to pass `os.path.join(data_dir, "**")` as `data_files`.
        data_files (:obj:`Union[Dict, List, str]`, optional): Defining the data_files of the dataset configuration.
        cache_dir (`str`, *optional*):
            Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.

            <Added version="2.16.0"/>

        **download_kwargs (additional keyword arguments): optional attributes for DownloadConfig() which will override
            the attributes in download_config if supplied.

    Returns:
        DatasetModule
    NTc                     | S r   r\   )xs    rU   <lambda>z(dataset_module_factory.<locals>.<lambda>t  s    Q rW   r   r\  r   )r   r   r   r   z3Dataset scripts are no longer supported, but found )r   r   r   r   r.  r/  r5  r6  zCouldn't reach 'z' on the Hub ()rN  )r:  r   z	Dataset 'z ' is a gated dataset on the Hub.i  z( You must be authenticated to access it.i  z; Visit the dataset page at https://huggingface.co/datasets/z to ask for access.z
Revision 'z' doesn't exist for dataset 'z' on the Hub.z1' doesn't exist on the Hub or cannot be accessed.mainF)r*  r   r   r   r   r+  )rY  z1Couldn't reach the Hugging Face Hub for dataset 'z': zCouldn't find any data file at z. Couldn't find 'z"' on the Hugging Face Hub either: z: r   r\   )Ar%   r&   REUSE_DATASET_IF_EXISTSextract_compressed_fileforce_extractFORCE_REDOWNLOADforce_downloadr   r  replacer   sepr   r   r   r   r8   r%  r   RuntimeErrorr  rc  r   r?   r   r   r   r>  r1  r   r>   r4  r<   r?  r  r;  basenamedirnamer   
isinstance	__cause__r   requests
exceptionsTimeoutConnectionErrorrO   rX   r   rT  rU  r   responsestatus_coder,   r   r   r)  r   rX  r+   r    rA  r@   type)r   r:  r   r   r   r   rY  download_kwargsr8  combined_pathrF  rG  r*  r   messager+  e1s                    rU   dataset_module_factoryr  :  s   f (;?; !V,2V2VWM.2O+$(O!%2l6S6S%SO"F;RVVS(A(G(G(LMNrRHU#e#GGLLx0M$ ))+!+'
 *,	 
x	 PQYPZ[\\		&PQYPZ[\\	t	(8
-

*,	 
$	DJJsOq$8k	# ++%++' +2?3M3MNC.w13&)&9&9 #55'%+33 ': '# !gg..rww?R/STL"## %'(+33 $  #%XYaXb#cddX  "AB[\`BaAbbc dees + KK, ++33 ++;; *,<TF.QRQ\Q\QeQePffg*hiopp% !..%! /  #	  %##++##33 m
 &(8nQ[[MaMaLbbc&dekll! ;%dV+KL::))S0IIGZZ++s2!\]a\bbuvvG*73:( * 
*Gv][ + w*Ytf<m+nouvvw & zh8v;M16.15.. +%)$3"//I *, " ;%dV+KL::))S0IIGZZ++s2!\]a\bbuvvG*73:( * 
*Gv][  	##1$)LWWYY #b"67),]^b]ccfgifj*klrvvb#9;OQb"cd$&b"34+9:STX:Y9Z [**./QRVWYRZRcRcQddfgifjl    d"#		#s   ?T A4J .Q 	QA5L*Q>T  ;Q;'N""Q.APQP  Q,P<<QT 9S>=T ?S>ASS>&S99S>>T 	WT+%W+BV<<V??Wfeaturesr1  storage_optionsc                    t        |xs t         j                        }|	#|r|j                         n	t               }|	|_        |
7|r|j                         n	t               }|j
                  j                  |
       |t        |      }t        | ||||||      }|j                  }|j                  d|      }|j                  d|      }|j                  d|xs |j                  j                        }|j                  dd      }|j                  r|j                  j                  |      nd}| t        v rd|b|j                  j                   d   j"                  ?d|  d	}t$        D cg c]  }t$        |   | k(  s| }}|r|d
|d    dz  }t'        |      t)        ||      } |d||||||j*                  |||	|
d
||}|j-                  |       |S c c}w )a  Load a dataset builder which can be used to:

    - Inspect general information that is required to build a dataset (cache directory, config, dataset info, features, data files, etc.)
    - Download and prepare the dataset as Arrow files in the cache
    - Get a streaming dataset without downloading or caching anything

    You can find the list of datasets on the [Hub](https://huggingface.co/datasets) or with [`huggingface_hub.list_datasets`].

    A dataset is a directory that contains some data files in generic formats (JSON, CSV, Parquet, etc.) and possibly
    in a generic structure (Webdataset, ImageFolder, AudioFolder, VideoFolder, etc.)

    Args:

        path (`str`):
            Path or name of the dataset.

            - if `path` is a dataset repository on the HF hub (list all available datasets with [`huggingface_hub.list_datasets`])
              -> load the dataset builder from supported files in the repository (csv, json, parquet, etc.)
              e.g. `'username/dataset_name'`, a dataset repository on the HF hub containing the data files.

            - if `path` is a local directory
              -> load the dataset builder from supported files in the directory (csv, json, parquet, etc.)
              e.g. `'./path/to/directory/with/my/csv/data'`.

            - if `path` is the name of a dataset builder and `data_files` or `data_dir` is specified
              (available builders are "json", "csv", "parquet", "arrow", "text", "xml", "webdataset", "imagefolder", "audiofolder", "videofolder")
              -> load the dataset builder from the files in `data_files` or `data_dir`
              e.g. `'parquet'`.

        name (`str`, *optional*):
            Defining the name of the dataset configuration.
        data_dir (`str`, *optional*):
            Defining the `data_dir` of the dataset configuration. If specified for the generic builders (csv, text etc.) or the Hub datasets and `data_files` is `None`,
            the behavior is equal to passing `os.path.join(data_dir, **)` as `data_files` to reference all the files in a directory.
        data_files (`str` or `Sequence` or `Mapping`, *optional*):
            Path(s) to source data file(s).
        cache_dir (`str`, *optional*):
            Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.
        features ([`Features`], *optional*):
            Set the features type to use for this dataset.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters.
        download_mode ([`DownloadMode`] or `str`, defaults to `REUSE_DATASET_IF_EXISTS`):
            Download/generate mode.
        revision ([`Version`] or `str`, *optional*):
            Version of the dataset to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        token (`str` or `bool`, *optional*):
            Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
            If `True`, or not specified, will get token from `"~/.huggingface"`.
        storage_options (`dict`, *optional*, defaults to `None`):
            **Experimental**. Key/value pairs to be passed on to the dataset file-system backend, if any.

            <Added version="2.11.0"/>

        **config_kwargs (additional keyword arguments):
            Keyword arguments to be passed to the [`BuilderConfig`]
            and used in the [`DatasetBuilder`].

    Returns:
        [`DatasetBuilder`]

    Example:

    ```py
    >>> from datasets import load_dataset_builder
    >>> ds_builder = load_dataset_builder('cornell-movie-review-data/rotten_tomatoes')
    >>> ds_builder.info.features
    {'label': ClassLabel(names=['neg', 'pos']),
     'text': Value('string')}
    ```
    N)r:  r   r   r   r   rY  r   r   r   rM   r   z@Please specify the data files or data directory to load for the z dataset builder.z9
For example `data_files={"train": "path/to/data/train/*.z"}`)rM   )
rY  rM   r   r   r   r   infor  r1  r  r\   )r&   rp  rg   r%   r1  r  r  r.   r  r   r  r~   rL   r   r   r8   r]   r   r5   r   r   r   !_use_legacy_cache_dir_if_possible)r   rS   r   r   rY  r  r   r   r:  r1  r  config_kwargsr{   r   r   rM   r  	error_msg	extensionexample_extensionsrQ   builder_instances                         rU   load_dataset_builderr    s,   n !!V,2V2VWM4C/..0IY %"4C/..0IY''..?8B+'#N $22N!!*h7H##L*=J $$t\~HH\\K "%%nd;L<J<X<X>''++K8^bD 	**55EEaHSS[VW[V\\mn	';
#?ST]?^bf?fI
 
 VWijkWlVmmqrrI##+NVK'2 (!  '( ( ( 66~F3
s   +G=Gr   verification_modekeep_in_memory
save_infos	streamingnum_procc                    d|v r*|j                  d      rt        j                  d|  d       ||st        d| d      t	        | t
        j                        j                         rt        d      |r|t        d      t        |xs t        j                        }t        |s|	xs  t        j                  nt        j                        }	t        d| ||||||||||d|}|r|j                  |	      S |j!                  |||	||
       |
|
nt#        |j$                  j&                        }
|j)                  ||	|
      }|S )a   Load a dataset from the Hugging Face Hub, or a local dataset.

    You can find the list of datasets on the [Hub](https://huggingface.co/datasets) or with [`huggingface_hub.list_datasets`].

    A dataset is a directory that contains some data files in generic formats (JSON, CSV, Parquet, etc.) and possibly
    in a generic structure (Webdataset, ImageFolder, AudioFolder, VideoFolder, etc.)

    This function does the following under the hood:

        1. Load a dataset builder:

            * Find the most common data format in the dataset and pick its associated builder (JSON, CSV, Parquet, Webdataset, ImageFolder, AudioFolder, etc.)
            * Find which file goes into which split (e.g. train/test) based on file and directory names or on the YAML configuration
            * It is also possible to specify `data_files` manually, and which dataset builder to use (e.g. "parquet").

        2. Run the dataset builder:

            In the general case:

            * Download the data files from the dataset if they are not already available locally or cached.
            * Process and cache the dataset in typed Arrow tables for caching.

                Arrow table are arbitrarily long, typed tables which can store nested objects and be mapped to numpy/pandas/python generic types.
                They can be directly accessed from disk, loaded in RAM or even streamed over the web.

            In the streaming case:

            * Don't download or cache anything. Instead, the dataset is lazily loaded and will be streamed on-the-fly when iterating on it.

        3. Return a dataset built from the requested splits in `split` (default: all).

    Args:

        path (`str`):
            Path or name of the dataset.

            - if `path` is a dataset repository on the HF hub (list all available datasets with [`huggingface_hub.list_datasets`])
              -> load the dataset from supported files in the repository (csv, json, parquet, etc.)
              e.g. `'username/dataset_name'`, a dataset repository on the HF hub containing the data files.

            - if `path` is a local directory
              -> load the dataset from supported files in the directory (csv, json, parquet, etc.)
              e.g. `'./path/to/directory/with/my/csv/data'`.

            - if `path` is the name of a dataset builder and `data_files` or `data_dir` is specified
              (available builders are "json", "csv", "parquet", "arrow", "text", "xml", "webdataset", "imagefolder", "audiofolder", "videofolder")
              -> load the dataset from the files in `data_files` or `data_dir`
              e.g. `'parquet'`.

        name (`str`, *optional*):
            Defining the name of the dataset configuration.
        data_dir (`str`, *optional*):
            Defining the `data_dir` of the dataset configuration. If specified for the generic builders (csv, text etc.) or the Hub datasets and `data_files` is `None`,
            the behavior is equal to passing `os.path.join(data_dir, **)` as `data_files` to reference all the files in a directory.
        data_files (`str` or `Sequence` or `Mapping`, *optional*):
            Path(s) to source data file(s).
        split (`Split` or `str`):
            Which split of the data to load.
            If `None`, will return a `dict` with all splits (typically `datasets.Split.TRAIN` and `datasets.Split.TEST`).
            If given, will return a single Dataset.
            Splits can be combined and specified like in tensorflow-datasets.
        cache_dir (`str`, *optional*):
            Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.
        features (`Features`, *optional*):
            Set the features type to use for this dataset.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters.
        download_mode ([`DownloadMode`] or `str`, defaults to `REUSE_DATASET_IF_EXISTS`):
            Download/generate mode.
        verification_mode ([`VerificationMode`] or `str`, defaults to `BASIC_CHECKS`):
            Verification mode determining the checks to run on the downloaded/processed dataset information (checksums/size/splits/...).

            <Added version="2.9.1"/>
        keep_in_memory (`bool`, defaults to `None`):
            Whether to copy the dataset in-memory. If `None`, the dataset
            will not be copied in-memory unless explicitly enabled by setting `datasets.config.IN_MEMORY_MAX_SIZE` to
            nonzero. See more details in the [improve performance](../cache#improve-performance) section.
        revision ([`Version`] or `str`, *optional*):
            Version of the dataset to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        token (`str` or `bool`, *optional*):
            Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
            If `True`, or not specified, will get token from `"~/.huggingface"`.
        streaming (`bool`, defaults to `False`):
            If set to `True`, don't download the data files. Instead, it streams the data progressively while
            iterating on the dataset. An [`IterableDataset`] or [`IterableDatasetDict`] is returned instead in this case.

            Note that streaming works for datasets that use data formats that support being iterated over like txt, csv, jsonl for example.
            Json files may be downloaded completely. Also streaming from remote zip or gzip files is supported but other compressed formats
            like rar and xz are not yet supported. The tgz format doesn't allow streaming.
        num_proc (`int`, *optional*, defaults to `None`):
            Number of processes when downloading and generating the dataset locally.
            Multiprocessing is disabled by default.

            <Added version="2.7.0"/>
        storage_options (`dict`, *optional*, defaults to `None`):
            **Experimental**. Key/value pairs to be passed on to the dataset file-system backend, if any.

            <Added version="2.11.0"/>
        **config_kwargs (additional keyword arguments):
            Keyword arguments to be passed to the `BuilderConfig`
            and used in the [`DatasetBuilder`].

    Returns:
        [`Dataset`] or [`DatasetDict`]:
        - if `split` is not `None`: the dataset requested,
        - if `split` is `None`, a [`~datasets.DatasetDict`] with each split.

        or [`IterableDataset`] or [`IterableDatasetDict`]: if `streaming=True`

        - if `split` is not `None`, the dataset is requested
        - if `split` is `None`, a [`~datasets.streaming.IterableDatasetDict`] with each split.

    Example:

    Load a dataset from the Hugging Face Hub:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('cornell-movie-review-data/rotten_tomatoes', split='train')

    # Load a subset or dataset configuration (here 'sst2')
    >>> from datasets import load_dataset
    >>> ds = load_dataset('nyu-mll/glue', 'sst2', split='train')

    # Manual mapping of data files to splits
    >>> data_files = {'train': 'train.csv', 'test': 'test.csv'}
    >>> ds = load_dataset('namespace/your_dataset_name', data_files=data_files)

    # Manual selection of a directory to load
    >>> ds = load_dataset('namespace/your_dataset_name', data_dir='folder_name')
    ```

    Load a local dataset:

    ```py
    # Load a CSV file
    >>> from datasets import load_dataset
    >>> ds = load_dataset('csv', data_files='path/to/local/my_dataset.csv')

    # Load a JSON file
    >>> from datasets import load_dataset
    >>> ds = load_dataset('json', data_files='path/to/local/my_dataset.json')
    ```

    Load an [`~datasets.IterableDataset`]:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('cornell-movie-review-data/rotten_tomatoes', split='train', streaming=True)
    ```

    Load an image dataset with the `ImageFolder` dataset builder:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('imagefolder', data_dir='/path/to/images', split='train')
    ```
    trust_remote_codezZ`trust_remote_code` is not supported anymore.
Please check that the Hugging Face dataset 'z' isn't based on a loading script and remove `trust_remote_code`.
If the dataset is based on a loading script, please ask the dataset author to remove it and convert it to a standard format like Parquet.zEmpty 'data_files': 'z3'. It should be either non-empty or None (default).zjYou are trying to load a dataset that was saved using `save_to_disk`. Please use `load_from_disk` instead.zLoading a streaming dataset in parallel with `num_proc` is not implemented. To parallelize streaming, you can wrap the dataset with a PyTorch DataLoader using `num_workers` > 1 instead.)r   rS   r   r   rY  r  r   r   r:  r1  r  )r   )r   r   r  r  r  )r   r  	in_memoryr\   )r  r   errorr   r   r   DATASET_STATE_JSON_FILENAMEr
  r   r&   rp  rB   BASIC_CHECKS
ALL_CHECKSr  as_streaming_datasetdownload_and_preparerC   r  dataset_size
as_dataset)r   rS   r   r   r   rY  r  r   r   r  r  r  r:  r1  r  r  r  r  r  dss                       rU   load_datasetr    s   h m+01LL??Cf E\\
 j0<opqqD&445<<>3
 	

 X)!|
 	

 !!V,2V2VWM(DN		;.;;TdToTo
 , '#'   4454AA ))'#+' *  )4:JK[K`K`KmKm:n  
	$	$5DUao	$	pBIrW   dataset_pathc                    t        | fi |xs i ^}}|j                  |       st        d|  d      |j                  t	        j
                  | t        j                              rK|j                  t	        j
                  | t        j                              rt        j                  | ||      S |j                  t	        j
                  | t        j                              rt        j                  | ||      S t        d|  d      )aJ  
    Loads a dataset that was previously saved using [`~Dataset.save_to_disk`] from a dataset directory, or
    from a filesystem using any implementation of `fsspec.spec.AbstractFileSystem`.

    Args:
        dataset_path (`path-like`):
            Path (e.g. `"dataset/train"`) or remote URI (e.g. `"s3://my-bucket/dataset/train"`)
            of the [`Dataset`] or [`DatasetDict`] directory where the dataset/dataset-dict will be
            loaded from.
        keep_in_memory (`bool`, defaults to `None`):
            Whether to copy the dataset in-memory. If `None`, the dataset
            will not be copied in-memory unless explicitly enabled by setting `datasets.config.IN_MEMORY_MAX_SIZE` to
            nonzero. See more details in the [improve performance](../cache#improve-performance) section.

        storage_options (`dict`, *optional*):
            Key/value pairs to be passed on to the file-system backend, if any.

            <Added version="2.9.0"/>

    Returns:
        [`Dataset`] or [`DatasetDict`]:
        - If `dataset_path` is a path of a dataset directory: the dataset requested.
        - If `dataset_path` is a path of a dataset dict directory, a [`DatasetDict`] with each split.

    Example:

    ```py
    >>> from datasets import load_from_disk
    >>> ds = load_from_disk('path/to/dataset/directory')
    ```
    """
    fs, *_ = url_to_fs(dataset_path, **(storage_options or {}))
    if not fs.exists(dataset_path):
        raise FileNotFoundError(f"Directory {dataset_path} not found")
    if fs.isfile(posixpath.join(dataset_path, config.DATASET_INFO_FILENAME)) and fs.isfile(
        posixpath.join(dataset_path, config.DATASET_STATE_JSON_FILENAME)
    ):
        return Dataset.load_from_disk(dataset_path, keep_in_memory=keep_in_memory, storage_options=storage_options)
    elif fs.isfile(posixpath.join(dataset_path, config.DATASETDICT_JSON_FILENAME)):
        return DatasetDict.load_from_disk(dataset_path, keep_in_memory=keep_in_memory, storage_options=storage_options)
    else:
        raise FileNotFoundError(
            f"Directory {dataset_path} is neither a `Dataset` directory nor a `DatasetDict` directory."
        )
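
# Minimal usage sketch, not part of the upstream module: a save_to_disk / load_from_disk round
# trip. The "./my_dataset" path is hypothetical, and the guard keeps the example from running on
# import (run it with `python -m datasets.load` so the relative imports above resolve).
if __name__ == "__main__":  # illustrative only
    Dataset.from_dict({"text": ["a", "b"]}).save_to_disk("./my_dataset")
    reloaded = load_from_disk("./my_dataset")
    print(len(reloaded))  # expected: 2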
NNNNNNNNNN)NNNNNNNNNNFNNFNN)r[   rb  rn   rq   r  r   r  collectionsr   collections.abcr   r   dataclassesr   r   pathlibr   typingr	   r
   r   fsspecr|  r  fsspec.corer   huggingface_hubr   r   r   huggingface_hub.utilsr   r   r   r   r   r   r   r   r   r   arrow_datasetr   builderr   r   r   r   r   r   r    r!   r"   dataset_dictr#   r$   download.download_configr%   download.download_managerr&   #download.streaming_download_managerr'   r(   r)   r*   r}  r+   r,   r  r-   features.featuresr.   fingerprintr/   r  r0   r1   iterable_datasetr2   namingr3   r4   packaged_modulesr5   r6   r7   r8   :packaged_modules.folder_based_builder.folder_based_builderr9   splitsr:   utilsr;   utils.file_utilsr<   r=   r>   r?   r@   	utils.hubrA   utils.info_utilsrB   rC   utils.loggingrD   utils.metadatarE   utils.typingrF   utils.versionrG   rX   r   r   keysr   rJ   r  r   rN   rz   r   r   r   r   r   r   r   r   r   r|   r   r   r%  r)  rK  rX  r  r   r  r   r  r  r\   rW   rU   <module>r     s         	   - (  ' '    ! ? ?   " " 2  ; 4 3 b b D  D  / - B  [  "  & @ % + " " 
H	72779:fXE  $%$n%%$-(%$ "#%$ 	%$
 
.%$PhtN/C&D $ DH#3;C=	."
c 
 QU"""5=n5M"
8C=$"L QU#"#5=n5M#
8C=$#N hl//%-c]/LTUcLd/
8C=$sCx.()/>  $-1046060%60 }60 !cN	60
 n-60 4#$60r . . ." 5 5 5" "
i
 5 i
X&@#8 &@Re
3 e
PG
/D G
T'Z!6 'ZX /3048<"BF#Of
OfuS'\*+Of n-Of E,"345	Of
 smOf tT3=>?Of }Of Ofh "_c##'048<.2(,&*R
R
3-R smR sHSM73c8TW=FX@Y;Y3ZZ[\	R
 }R x R n-R E,"345R uS'\*+R E$)$%R d^R Rn "_cAE##'048<@D%).2(,"&*#q
q
3-q smq sHSM73c8TW=FX@Y;Y3ZZ[\	q
 E#ud3ie<=>q }q x q n-q E,"345q  &6&; <=q TNq q uS'\*+q E$)$%q q  sm!q" d^#q& ;!4oEF'qj fj/
/
,4TN/
T\]aTb/

7K /
rW   