Replies: 2 comments 2 replies
-
I was looking at this a while back and I think it cannot be done via the API. I hacked something together that calls the regular UI URLs and it worked (see below)... but having a proper API would be nice.

# ensure the netbox-data git repo is added as a data source
# then add all NetBox scripts (via the GUI form)
import requests
from time import sleep
from sys import exit
# ci_vars is a local helper module: credentials, URLs and a login() helper for the UI session
from ci_vars import api_token, data_source_branch, data_source_url, login, url_base
s = requests.Session()  # UI session (cookies + CSRF), used for the form workaround below
api = requests.Session()  # REST API session
api.headers['Authorization'] = f'Token {api_token}'
exit_code = 0
if not login(s):
    print('netbox login failed')
    exit(1)
print('Searching for data source')
api_url = f'{url_base}/api/core/data-sources/?type=git&enabled=true&source_url={data_source_url}'
r = api.get(api_url)
r.raise_for_status()
if len(r.json()['results']) == 1:
    data_source_id = r.json()['results'][0]['id']
    print(f'Found data source ID:{data_source_id}')
elif len(r.json()['results']) == 0:
    print('No data source found. Creating it.')
    data = dict(name='gitlab netbox data', type='git', enabled=True,
                source_url=data_source_url, parameters=dict(branch=data_source_branch))
    r = api.post(f'{url_base}/api/core/data-sources/', json=data)
    r.raise_for_status()
    data_source_id = r.json()['id']
    print(f'Data source created. ID:{data_source_id}')
else:
    print('Found multiple active data sources with the same URL, cannot select the correct one. Aborting.')
    exit(1)
print('Trigger sync')
api_url = f'{url_base}/api/core/data-sources/{data_source_id}/sync/'
r = api.post(api_url)
r.raise_for_status()
sleep(3)  # give the sync job a moment to run before listing the data files
print('Find existing scripts')
api_url = f'{url_base}/api/extras/scripts/?limit=200'
r = api.get(api_url)
r.raise_for_status()
modules_used = {script.get('module') for script in r.json()['results']}
print(f'Found {len(modules_used)} script modules')
print('Find data source script files')
api_url = f'{url_base}/api/core/data-files/?source_id={data_source_id}&path__ic=scripts/&limit=200'
r = api.get(api_url)
r.raise_for_status()
data_files = {f['id'] for f in r.json()['results']
              if f['path'].startswith('scripts/') and f['path'].endswith('.py')}
print(f'Found {len(data_files)} data files matching scripts/*.py')
to_import = data_files - modules_used  # only files not already attached to an existing script module
print(f'Trying to import {len(to_import)} script files: {to_import}')
url = f'{url_base}/extras/scripts/add/'
for module_id in to_import:
    print(f'Importing file id:{module_id} ... ', end='')
    # no API endpoint exists for this step, so drive the regular UI form:
    # GET the add form to pick up a CSRF token, then POST the form fields
    r = s.get(url)
    r = s.post(
        url,
        headers={'Referer': r.url},
        data={
            'csrfmiddlewaretoken': s.cookies['csrftoken'],
            'data_source': data_source_id,
            'data_file': module_id,
            'auto_sync_enabled': 'on',
        },
    )
    r.raise_for_status()
    # sanity check: listing scripts via the API fails if an imported module cannot be loaded
    api_url = f'{url_base}/api/extras/scripts/'
    r = api.get(api_url)
    if r.ok:
        print('Success')
    else:
        print('ERROR')
        exit_code = 1
print('Data source script import completed.')
exit(exit_code)
1 reply
-
Hi! Similarly, you can do this through a data source.
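A minimal sketch of that data-source part, reusing the /api/core/data-sources/ endpoints from the script above; the URL, token, repository and branch below are placeholders, so adjust them for your environment:

# minimal sketch: create a git data source and trigger a sync via the REST API
# NETBOX_URL, API_TOKEN and REPO_URL are placeholders
import requests

NETBOX_URL = 'https://netbox.example.com'
API_TOKEN = 'xxxxxxxx'
REPO_URL = 'https://gitlab.example.com/netbox/netbox-data.git'

api = requests.Session()
api.headers['Authorization'] = f'Token {API_TOKEN}'

# 1. create the git data source
r = api.post(f'{NETBOX_URL}/api/core/data-sources/', json={
    'name': 'netbox-data',
    'type': 'git',
    'enabled': True,
    'source_url': REPO_URL,
    'parameters': {'branch': 'main'},
})
r.raise_for_status()
ds_id = r.json()['id']

# 2. trigger a sync so the repository files show up under /api/core/data-files/
r = api.post(f'{NETBOX_URL}/api/core/data-sources/{ds_id}/sync/')
r.raise_for_status()

Attaching the synced files as scripts is still the step that needs the UI-form workaround above (or manual clicks in the GUI).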
-
I can find API endpoints to create git-synced data sources and to trigger a sync via the API.
I'd now like to use this data source to create a script in NetBox that I then want to trigger via the API (this bit is also available),
but the part in the middle, where the script itself gets created, is missing.
Could someone suggest how that can be done?
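For reference, a rough sketch of the "available" pieces mentioned above: the data-source creation and sync calls are shown earlier in the thread, and an existing script can be triggered with a POST like the one below. The script name my_module.MyScript, the URL and the token are placeholders, and the exact endpoint path may differ between NetBox versions, so check your instance's API schema. The missing middle step, attaching the synced file as a script, is what the UI-form workaround earlier in the thread covers.

# hedged sketch: run an already-registered custom script via the REST API
# 'my_module.MyScript' is a placeholder; recent NetBox versions may also
# accept the numeric script ID -- verify against your version's API schema
import requests

NETBOX_URL = 'https://netbox.example.com'
API_TOKEN = 'xxxxxxxx'

r = requests.post(
    f'{NETBOX_URL}/api/extras/scripts/my_module.MyScript/',
    headers={'Authorization': f'Token {API_TOKEN}'},
    json={'data': {}, 'commit': True},  # 'data' carries the script's form variables
)
r.raise_for_status()
print(r.json())  # the response describes the queued job, which can be polled for the result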