
Commit 73a190f

brandonrising authored and psychedelicious committed

Add remote_repo_api_key config to be added as a token query param for all remote url model downloads

1 parent 952d977 · commit 73a190f

3 files changed: +8 −2 lines changed

invokeai/app/services/config/config_default.py

Lines changed: 3 additions & 0 deletions

@@ -287,6 +287,9 @@ class InvokeAIAppConfig(InvokeAISettings):
     deny_nodes : Optional[List[str]] = Field(default=None, description="List of nodes to deny. Omit to deny none.", json_schema_extra=Categories.Nodes)
     node_cache_size : int = Field(default=512, description="How many cached nodes to keep in memory", json_schema_extra=Categories.Nodes)

+    # MODEL IMPORT
+    remote_repo_api_key : Optional[str] = Field(default=os.environ.get("INVOKEAI_REMOTE_REPO_API_KEY"), description="API key used when downloading remote repositories", json_schema_extra=Categories.Other)
+
     # DEPRECATED FIELDS - STILL HERE IN ORDER TO OBTAN VALUES FROM PRE-3.1 CONFIG FILES
     always_use_cpu : bool = Field(default=False, description="If true, use the CPU for rendering even if a GPU is available.", json_schema_extra=Categories.MemoryPerformance)
     max_cache_size : Optional[float] = Field(default=None, gt=0, description="Maximum memory amount used by model cache for rapid switching", json_schema_extra=Categories.MemoryPerformance)
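For context on how the new setting is picked up: the default is read from the INVOKEAI_REMOTE_REPO_API_KEY environment variable when the class body is evaluated, i.e. at import time. Below is a minimal standalone sketch of that pattern; the RemoteRepoSettings class and the token value are hypothetical stand-ins, not InvokeAI's actual settings machinery.

```python
import os
from typing import Optional

from pydantic import BaseModel, Field

# Hypothetical value; in practice the variable must be set before the config
# module is imported, because os.environ.get(...) runs when the class body is
# evaluated, not when an instance is created.
os.environ["INVOKEAI_REMOTE_REPO_API_KEY"] = "example-token"


class RemoteRepoSettings(BaseModel):  # stand-in for InvokeAIAppConfig
    remote_repo_api_key: Optional[str] = Field(
        default=os.environ.get("INVOKEAI_REMOTE_REPO_API_KEY"),
        description="API key used when downloading remote repositories",
    )


print(RemoteRepoSettings().remote_repo_api_key)  # -> example-token
```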

invokeai/app/services/download/download_default.py

Lines changed: 4 additions & 1 deletion

@@ -241,12 +241,15 @@ def _download_next_item(self) -> None:
     def _do_download(self, job: DownloadJob) -> None:
         """Do the actual download."""
         url = job.source
+        query_params = url.query_params()
+        if job.access_token:
+            query_params.append(("access_token", job.access_token))
         header = {"Authorization": f"Bearer {job.access_token}"} if job.access_token else {}
         open_mode = "wb"

         # Make a streaming request. This will retrieve headers including
         # content-length and content-disposition, but not fetch any content itself
-        resp = self._requests.get(str(url), headers=header, stream=True)
+        resp = self._requests.get(str(url), params=query_params, headers=header, stream=True)
         if not resp.ok:
             raise HTTPError(resp.reason)
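Taken together, the change sends the token twice when one is present: as an access_token query parameter and as a Bearer Authorization header. Here is a rough sketch of the resulting request shape, with the DownloadJob plumbing stripped out and httpbin.org standing in for a real model URL.

```python
import requests

access_token = "example-token"   # stand-in for job.access_token
url = "https://httpbin.org/get"  # stand-in for job.source

# Mirror the change above: the token goes into the query string *and* the header.
query_params = [("access_token", access_token)] if access_token else []
headers = {"Authorization": f"Bearer {access_token}"} if access_token else {}

resp = requests.get(url, params=query_params, headers=headers, stream=True)
if not resp.ok:
    raise requests.HTTPError(resp.reason)
print(resp.url)  # ...?access_token=example-token
```

In the diff itself, url.query_params() first copies any query parameters already present on the source URL, so the token is appended to them rather than replacing them.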

invokeai/app/services/model_install/model_install_default.py

Lines changed: 1 addition & 1 deletion

@@ -199,7 +199,7 @@ def heuristic_import(
         elif re.match(r"^https?://[^/]+", source):
             source_obj = URLModelSource(
                 url=AnyHttpUrl(source),
-                access_token=access_token,
+                access_token=self.app_config.remote_repo_api_key,
             )
         else:
             raise ValueError(f"Unsupported model source: '{source}'")
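With this change, any plain http(s) URL passed to heuristic_import gets its access token from the app config rather than from the caller-supplied access_token argument. A simplified standalone sketch of that branch follows; classify_source and the dict it returns are illustrative stand-ins for URLModelSource.

```python
import re

# Stand-in for self.app_config.remote_repo_api_key
remote_repo_api_key = "example-token"


def classify_source(source: str) -> dict:
    """Roughly mimic the URL branch of heuristic_import shown above."""
    if re.match(r"^https?://[^/]+", source):
        # Token now comes from the config rather than from the caller.
        return {"url": source, "access_token": remote_repo_api_key}
    raise ValueError(f"Unsupported model source: '{source}'")


print(classify_source("https://example.com/models/foo.safetensors"))
```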
