@@ -1200,13 +1200,6 @@ static std::filesystem::path create_credential_path() {
     const char * home_dir = nullptr;
 #ifdef _WIN32
     home_dir = getenv("USERPROFILE");
-    if (!home_dir) {
-        const char * homeDrive = getenv("HOMEDRIVE");
-        const char * homePath = getenv("HOMEPATH");
-        if (homeDrive && homePath) return std::string(homeDrive) + homePath;
-        char documentsPath[MAX_PATH];
-        if (SUCCEEDED(SHGetFolderPathA(NULL, CSIDL_PERSONAL, NULL, SHGFP_TYPE_CURRENT, documentsPath))) return std::string(documentsPath);
-    }
 #else
     home_dir = getenv("HOME");
 #endif
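For orientation, a sketch of the simplified helper after this hunk; the trailing portion of the function is not shown in the diff, so the final return is a stand-in:

#include <cstdlib>
#include <filesystem>

// Post-change shape (sketch): one environment variable per platform, no
// HOMEDRIVE/HOMEPATH or SHGetFolderPathA fallbacks, hence no <shlobj.h>
// dependency on Windows.
static std::filesystem::path create_credential_path() {
    const char * home_dir = nullptr;
#ifdef _WIN32
    home_dir = getenv("USERPROFILE");
#else
    home_dir = getenv("HOME");
#endif
    // Stand-in for the rest of the function, which this hunk does not show:
    // callers now have to tolerate an empty path when the variable is unset.
    return home_dir ? std::filesystem::path(home_dir) : std::filesystem::path();
}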
@@ -1610,8 +1603,8 @@ bool ms_login(const std::string & token) {
     json response_json = json::parse(response_string);
     json data = response_json["Data"];
     auto access_token = data["AccessToken"].get<std::string>();
-    save_to_file(git_token_file.c_str(), access_token);
-    save_to_file(user_file.c_str(), data["Username"].get<std::string>() + ":" + data["Email"].get<std::string>());
+    save_to_file(git_token_file.generic_string(), access_token);
+    save_to_file(user_file.generic_string(), data["Username"].get<std::string>() + ":" + data["Email"].get<std::string>());
     return true;
 }

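The change from c_str() to generic_string() is more than cosmetic: std::filesystem::path::c_str() returns const path::value_type *, which is const wchar_t * on Windows, so it cannot bind to a std::string parameter there, while generic_string() returns a std::string with '/' separators on every platform. The save_to_file definition is not part of this diff; a hypothetical stand-in with the signature the call sites imply:

#include <fstream>
#include <string>

// Hypothetical helper matching the calls above; only the std::string
// parameter is implied by the diff, the body is a guess.
static void save_to_file(const std::string & path, const std::string & content) {
    std::ofstream out(path, std::ios::binary | std::ios::trunc);
    out << content;
}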
@@ -1802,6 +1795,26 @@ struct llama_model * common_load_model_from_url(
     return nullptr;
 }

+struct llama_model * common_load_model_from_ms(
+        const std::string & /* repo */,
+        const std::string & /* remote_path */,
+        const std::string & /* local_path */,
+        const std::string & /* ms_token */,
+        const struct llama_model_params & /* params */) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return nullptr;
+}
+
+bool ms_login(const std::string & /* token */) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return false;
+}
+
+std::pair<std::string, std::string> common_get_ms_file(const std::string & /* ms_repo_with_tag */, const std::string & /* ms_token */) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return std::make_pair("", "");
+}
+
 struct llama_model * common_load_model_from_hf(
     const std::string & /* repo */,
     const std::string & /* remote_path */,
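The three additions are warn-and-return stubs for builds without libcurl, in the same style as the common_load_model_from_hf fallback that follows them: every download-related symbol still exists, so callers link unconditionally. A condensed sketch of the compile-time switch this lives under, assuming the LLAMA_USE_CURL guard that llama.cpp uses for its download code:

#ifdef LLAMA_USE_CURL
// ... real implementations earlier in the file: log in to ModelScope over
// HTTPS, resolve repo files, download, then load the local GGUF ...
#else
bool ms_login(const std::string & /* token */) {
    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
    return false; // same symbol in both builds, so callers always link
}
#endif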