# utils_gitea.py
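"""Helpers for querying the Gitea API at projects.blender.org.

Thin wrappers around urllib for fetching JSON data, walking paginated
endpoints concurrently, and retrieving issue information.
"""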

import json
import urllib.error
import urllib.parse
import urllib.request
from concurrent.futures import ThreadPoolExecutor, as_completed

BASE_API_URL = "https://projects.blender.org/api/v1"


def url_json_get(url, data=None):
    """Fetch `url` and return the decoded JSON response, or None on error.

    If `data` is given it is JSON-encoded and sent as a POST body;
    otherwise a plain GET request is made.
    """
    try:
        if data:
            data = json.dumps(data).encode('utf-8')
            request = urllib.request.Request(url, data=data, method='POST')
            request.add_header('Content-Type', 'application/json')
        else:
            request = urllib.request.Request(url)

        # Use a context manager so the connection is always closed.
        with urllib.request.urlopen(request) as response:
            return json.loads(response.read())

    except urllib.error.URLError as ex:
        print("Error making HTTP request:", ex)
        return None


def url_json_get_all_pages(url, item_filter=None, limit=50, exclude=frozenset(), verbose=False):
    """Fetch every page of a paginated endpoint and return all items.

    `item_filter` optionally restricts each item to the given keys;
    `exclude` skips items whose "number" field is in the set.
    """
    assert limit <= 50, "50 is the maximum limit of items per page"

    url_for_page = f"{url}&limit={limit}&page="

    # Fetch the first page synchronously to learn the total item count,
    # which Gitea reports in the `X-Total-Count` response header.
    with urllib.request.urlopen(url_for_page + '1') as response:
        headers_first = response.info()
        json_data_first = json.loads(response.read())

    total_count = int(headers_first.get('X-Total-Count'))
    total_pages = (total_count + limit - 1) // limit

    def fetch_page(page):
        # Page 1 was already fetched above, so reuse its data.
        if page == 1:
            json_data = json_data_first
        else:
            json_data = url_json_get(url_for_page + str(page))

        if verbose:
            print(f"Fetched page {page}")

        data = []
        # `url_json_get` returns None on failure; treat that as an empty page.
        for item in json_data or []:
            if exclude and int(item["number"]) in exclude:
                continue
            data.append({k: item[k] for k in item_filter}
                        if item_filter else item)

        return data

    # Fetch the remaining pages concurrently.
    with ThreadPoolExecutor() as executor:
        futures = [executor.submit(fetch_page, page)
                   for page in range(1, total_pages + 1)]
        all_results = [future.result() for future in as_completed(futures)]

    # Flatten the per-page lists into a single list of items.
    return [item for sublist in all_results for item in sublist]


def gitea_json_issue_get(owner, repo, number):
    """
    Get issue/pull JSON data.
    """
    url = f"{BASE_API_URL}/repos/{owner}/{repo}/issues/{number}"
    return url_json_get(url)


def gitea_fetch_issues(owner, repo, state='all', labels='', issue_attr_filter=None, since=None, exclude=frozenset()):
    """
    Get all issues of a repository, walking every result page.
    """
    query_params = {
        'labels': labels,
        'state': state,
        'type': 'issues'}

    if since:
        query_params['since'] = since

    base_url = f"{BASE_API_URL}/repos/{owner}/{repo}/issues"
    encoded_query_params = urllib.parse.urlencode(query_params)
    issues_url = f"{base_url}?{encoded_query_params}"
    return url_json_get_all_pages(issues_url, item_filter=issue_attr_filter, exclude=exclude, verbose=True)
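

# Minimal usage sketch (not part of the original module): the repository
# and the attribute filter below are illustrative assumptions, not values
# taken from this file.
if __name__ == "__main__":
    # Fetch open issues, keeping only a couple of fields per item.
    issues = gitea_fetch_issues(
        "blender", "blender",
        state='open',
        issue_attr_filter={"number", "title"},
    )
    print(f"Fetched {len(issues)} open issues")
    for issue in issues[:5]:
        print(issue["number"], issue["title"])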