# tools/routers/utils_gitea.py
# NOTE(review): the lines below were web-viewer residue (author, blob id,
# file size) captured when this file was copied from a hosted repository
# browser; kept here as a comment so the module stays importable.
# katanaml's picture | Tools: Bpy Doc, GPU Checker, Find Related
# 98caf15 | raw | history | blame | 2.84 kB
# utils_gitea.py
import json
import urllib.error
import urllib.parse
import urllib.request
from concurrent.futures import ThreadPoolExecutor, as_completed
BASE_API_URL = "https://projects.blender.org/api/v1"
def url_json_get(url, data=None):
    """
    Fetch JSON from *url*.

    If *data* is given it is serialized to JSON and sent as a POST body with
    a ``Content-Type: application/json`` header; otherwise a plain GET is
    performed.

    Returns the decoded JSON value, or ``None`` when the HTTP request fails
    (the error is printed, not raised).
    """
    try:
        if data:
            payload = json.dumps(data).encode('utf-8')
            request = urllib.request.Request(url, data=payload, method='POST')
            request.add_header('Content-Type', 'application/json')
        else:
            request = urllib.request.Request(url)

        # Context manager ensures the HTTP response is closed even if
        # reading or JSON decoding raises (original leaked the response).
        with urllib.request.urlopen(request) as response:
            return json.loads(response.read())
    except urllib.error.URLError as ex:
        print("Error making HTTP request:", ex)
        return None
def url_json_get_all_pages(url, item_filter=None, limit=50, exclude=None, verbose=False):
    """
    Fetch every page of a paginated Gitea listing endpoint, in parallel.

    :param url: listing URL that already contains a query string
        (``&limit=…&page=…`` is appended to it).
    :param item_filter: optional iterable of keys; when given, each item is
        reduced to just those keys.
    :param limit: page size, capped at the Gitea maximum of 50.
    :param exclude: optional set of issue numbers to skip
        (compared against each item's ``"number"`` field).
    :param verbose: print a line per fetched page.
    :return: flat list of (optionally filtered) items from all pages.
    """
    # Use raise instead of assert: asserts are stripped under `python -O`.
    if limit > 50:
        raise ValueError("50 is the maximum limit of items per page")

    # Fix for mutable-default-argument bug: the original `exclude=set()`
    # shared one set object across all calls.
    if exclude is None:
        exclude = set()

    url_for_page = f"{url}&limit={limit}&page="

    # Fetch page 1 synchronously to learn the total item count from the
    # X-Total-Count response header.
    with urllib.request.urlopen(url_for_page + '1') as response:
        headers_first = response.info()
        json_data_first = json.loads(response.read())

    total_count = int(headers_first.get('X-Total-Count'))
    total_pages = (total_count + limit - 1) // limit

    def fetch_page(page):
        # Page 1 was already retrieved above; reuse its payload.
        if page == 1:
            json_data = json_data_first
        else:
            # url_json_get returns None on request failure; treat a failed
            # page as empty instead of crashing the whole aggregation.
            json_data = url_json_get(url_for_page + str(page)) or []

        if verbose:
            print(f"Fetched page {page}")

        data = []
        for item in json_data:
            if exclude and int(item["number"]) in exclude:
                continue
            data.append({k: item[k] for k in item_filter}
                        if item_filter else item)

        return data

    # Fan out the remaining page fetches across a thread pool (I/O-bound).
    with ThreadPoolExecutor() as executor:
        futures = [executor.submit(fetch_page, page)
                   for page in range(1, total_pages + 1)]
        all_results = [future.result() for future in as_completed(futures)]

    # Flatten the per-page lists into one list of items.
    return [item for sublist in all_results for item in sublist]
def gitea_json_issue_get(owner, repo, number):
    """
    Return the JSON payload for one issue or pull request.

    Gitea serves both issues and pull requests from the ``issues``
    endpoint, so *number* may refer to either.
    """
    issue_url = "/".join(
        (BASE_API_URL, "repos", owner, repo, "issues", str(number)))
    return url_json_get(issue_url)
def gitea_fetch_issues(owner, repo, state='all', labels='', issue_attr_filter=None, since=None, exclude=None):
    """
    Fetch all issues of a repository, aggregating every result page.

    :param owner: repository owner (e.g. ``"blender"``).
    :param repo: repository name.
    :param state: issue state filter (``'all'``, ``'open'``, ``'closed'``).
    :param labels: comma-separated label filter, empty for no filter.
    :param issue_attr_filter: optional iterable of attribute names to keep
        on each returned issue dict.
    :param since: optional timestamp; only issues updated since then.
    :param exclude: optional set of issue numbers to skip.
    :return: list of issue dicts (filtered to *issue_attr_filter* if given).
    """
    # Fix for mutable-default-argument bug: the original `exclude=set()`
    # shared one set object across all calls. None is falsy, so the
    # downstream `if exclude` check behaves identically.
    query_params = {
        'labels': labels,
        'state': state,
        'type': 'issues',
    }
    if since:
        query_params['since'] = since

    # Use the module-level BASE_API_URL constant; the original shadowed it
    # with a redundant local re-definition of the same string.
    base_url = f"{BASE_API_URL}/repos/{owner}/{repo}/issues"
    encoded_query_params = urllib.parse.urlencode(query_params)
    issues_url = f"{base_url}?{encoded_query_params}"

    return url_json_get_all_pages(issues_url, item_filter=issue_attr_filter,
                                  exclude=exclude, verbose=True)